fixing storage

parent d45093a355
commit c58696f1c8
@@ -62,7 +62,7 @@ class DefaultTiltChecker : TiltChecker {
     override suspend fun analyze(input: PipelineInput): Instruction {

         val tolerance = 25f
-        var level_message = "Keep the phone straight"
+        var levelMessage = "Keep the phone straight"

         Log.d("TiltChecker", "Device Roll: ${input.deviceRoll}, Device Pitch: ${input.devicePitch}, Device Azimuth: ${input.deviceAzimuth}")

@@ -81,14 +81,14 @@ class DefaultTiltChecker : TiltChecker {

         if (!isLevel) {
             if (input.devicePitch > standardPitch) {
-                level_message = "Rotate the phone Right"
+                levelMessage = "Rotate the phone Right"
             } else if (input.devicePitch < standardPitch) {
-                level_message = "Rotate the phone Left"
+                levelMessage = "Rotate the phone Left"
             }
         }

         return Instruction(
-            message = if (isLevel) "Device is level" else level_message,
+            message = if (isLevel) "Device is level" else levelMessage,
             isValid = isLevel,
             result = TiltResult(input.deviceRoll, input.devicePitch, isLevel)
         )
@@ -189,7 +189,8 @@ class TFLiteObjectDetector(context: Context) : ObjectDetector {
                 animalBounds = primary?.bounds,
                 referenceObjects = refs,
                 label = primary?.label,
-                confidence = primary?.confidence ?: 0f
+                confidence = primary?.confidence ?: 0f,
+                segmentationMask = null // Initialize with null as detection step doesn't do segmentation
             )
         )
     }
@@ -302,7 +303,7 @@ class MockPoseAnalyzer : PoseAnalyzer {
             return Instruction(
                 message = if (valid) "Pose Correct" else "Adjust Position",
                 isValid = valid,
-                result = detection
+                result = detection.copy(segmentationMask = mask) // Pass the mask in the result
             )

         } finally {
@@ -248,7 +248,8 @@ class CSVDataSource(
             reproductiveStatus = row[INDEX_REPRO],
             description = row[INDEX_DESC],
             images = row[INDEX_IMAGES].split(";").filter { it.isNotBlank() },
-            video = row[INDEX_VIDEO]
+            video = row[INDEX_VIDEO],
+            segmentedImages = row.getOrNull(INDEX_SEGMENTED_IMAGES)?.split(";")?.filter { it.isNotBlank() } ?: emptyList()
         )
     }

@@ -314,6 +315,7 @@ class CSVDataSource(
         row[INDEX_DESC] = d.description
         row[INDEX_IMAGES] = d.images.joinToString(";")
         row[INDEX_VIDEO] = d.video
+        row[INDEX_SEGMENTED_IMAGES] = d.segmentedImages.joinToString(";")
         return row
     }

@@ -395,8 +397,9 @@ class CSVDataSource(
         const val INDEX_RATING_TEAT_THICKNESS = 37
         const val INDEX_RATING_MUSCULARITY = 38
         const val INDEX_RATING_BODY_COND_COMMENTS = 39
+        const val INDEX_SEGMENTED_IMAGES = 40

-        const val TOTAL_COLUMNS = 40
+        const val TOTAL_COLUMNS = 41

         val HEADER = arrayOf(
             "ID", "Name", "Species", "Breed", "Sex", "Weight", "Age", "MilkYield",
@@ -407,7 +410,7 @@ class CSVDataSource(
             "CentralLigament", "UdderDepth", "FrontTeatPosition", "TeatLength",
             "RearTeatPosition", "Locomotion", "BodyConditionScore", "HockDevelopment",
             "BoneStructure", "RearUdderWidth", "TeatThickness", "Muscularity",
-            "BodyConditionComments"
+            "BodyConditionComments", "SegmentedImages"
         )
     }
 }
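
The new SegmentedImages column is stored the same way as Images: one cell holding a semicolon-separated list, with getOrNull keeping rows written before column 40 existed readable. A standalone sketch of that round trip (the helper names and file names here are illustrative, not code from the repo):

    // Illustrative helpers mirroring how the diff encodes/decodes the SegmentedImages cell.
    fun encodeSegmentedImages(paths: List<String>): String =
        paths.joinToString(";")

    fun decodeSegmentedImages(cell: String?): List<String> =
        cell?.split(";")?.filter { it.isNotBlank() } ?: emptyList()

    fun main() {
        val cell = encodeSegmentedImages(listOf("A123_front_segmented.jpg", "A123_back_segmented.jpg"))
        println(cell)                         // A123_front_segmented.jpg;A123_back_segmented.jpg
        println(decodeSegmentedImages(cell))  // [A123_front_segmented.jpg, A123_back_segmented.jpg]
        println(decodeSegmentedImages(null))  // [] for legacy rows without the new column
    }
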
@@ -19,39 +19,112 @@ import kotlin.coroutines.resumeWithException

 class SubjectSegmenterHelper(private val context: Context) {

-    suspend fun segmentAndSave(inputUri: Uri): Uri? {
+    suspend fun segmentToBitmap(inputBitmap: Bitmap): Bitmap? {
         return suspendCancellableCoroutine { continuation ->
             try {
-                val image = InputImage.fromFilePath(context, inputUri)
+                val image = InputImage.fromBitmap(inputBitmap, 0)
                 val options = SubjectSegmenterOptions.Builder()
-                    .enableForegroundBitmap()
+                    .enableMultipleSubjects(
+                        SubjectSegmenterOptions.SubjectResultOptions.Builder()
+                            .enableSubjectBitmap()
+                            .build()
+                    )
                     .build()
                 val segmenter = SubjectSegmentation.getClient(options)

                 segmenter.process(image)
                     .addOnSuccessListener { result ->
-                        val foreground = result.foregroundBitmap
-                        if (foreground != null) {
+                        val subjects = result.subjects
+                        if (subjects.isNotEmpty()) {
+                            // Find the largest subject
+                            val mainSubject = subjects.maxByOrNull { it.width * it.height }
+
+                            if (mainSubject != null && mainSubject.bitmap != null) {
                                 try {
                                     val resultBitmap = Bitmap.createBitmap(
-                                        foreground.width,
-                                        foreground.height,
+                                        image.width,
+                                        image.height,
                                         Bitmap.Config.ARGB_8888
                                     )
                                     val canvas = Canvas(resultBitmap)
                                     canvas.drawColor(Color.BLACK)
-                                    canvas.drawBitmap(foreground, 0f, 0f, null)

-                                    val originalName = getFileName(inputUri) ?: "image"
-                                    val nameWithoutExt = originalName.substringBeforeLast('.')
-                                    val baseName = nameWithoutExt.replace(Regex("_segmented.*"), "")
-                                    val filename = "${baseName}_segmented_${System.currentTimeMillis()}.jpg"
+                                    val subjectBitmap = mainSubject.bitmap!!
+                                    canvas.drawBitmap(
+                                        subjectBitmap,
+                                        mainSubject.startX.toFloat(),
+                                        mainSubject.startY.toFloat(),
+                                        null
+                                    )
+                                    continuation.resume(resultBitmap)
+                                } catch (e: Exception) {
+                                    continuation.resumeWithException(e)
+                                }
+                            } else {
+                                continuation.resume(null)
+                            }
+                        } else {
+                            continuation.resume(null)
+                        }
+                    }
+                    .addOnFailureListener { e ->
+                        continuation.resumeWithException(e)
+                    }
+                    .addOnCompleteListener {
+                        segmenter.close()
+                    }
+            } catch (e: Exception) {
+                continuation.resumeWithException(e)
+            }
+        }
+    }
+
+    suspend fun segmentAndSave(inputBitmap: Bitmap, animalId: String, orientation: String, subFolder: String? = null): Uri? {
+        return suspendCancellableCoroutine { continuation ->
+            try {
+                val image = InputImage.fromBitmap(inputBitmap, 0)
+                val options = SubjectSegmenterOptions.Builder()
+                    .enableMultipleSubjects(
+                        SubjectSegmenterOptions.SubjectResultOptions.Builder()
+                            .enableSubjectBitmap()
+                            .build()
+                    )
+                    .build()
+                val segmenter = SubjectSegmentation.getClient(options)
+
+                segmenter.process(image)
+                    .addOnSuccessListener { result ->
+                        val subjects = result.subjects
+                        if (subjects.isNotEmpty()) {
+                            // Find the largest subject
+                            val mainSubject = subjects.maxByOrNull { it.width * it.height }
+
+                            if (mainSubject != null && mainSubject.bitmap != null) {
+                                try {
+                                    val resultBitmap = Bitmap.createBitmap(
+                                        image.width,
+                                        image.height,
+                                        Bitmap.Config.ARGB_8888
+                                    )
+                                    val canvas = Canvas(resultBitmap)
+                                    canvas.drawColor(Color.BLACK)
+
+                                    val subjectBitmap = mainSubject.bitmap!!
+                                    canvas.drawBitmap(
+                                        subjectBitmap,
+                                        mainSubject.startX.toFloat(),
+                                        mainSubject.startY.toFloat(),
+                                        null
+                                    )
+
+                                    val filename = "${animalId}_${orientation}_segmented.jpg"
+
                                     val contentValues = ContentValues().apply {
                                         put(MediaStore.MediaColumns.DISPLAY_NAME, filename)
                                         put(MediaStore.MediaColumns.MIME_TYPE, "image/jpeg")
                                         if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
-                                            put(MediaStore.MediaColumns.RELATIVE_PATH, "Pictures/LivingAI/Segmented")
+                                            val path = if (subFolder != null) "Pictures/LivingAI/$animalId/$subFolder" else "Pictures/LivingAI/$animalId"
+                                            put(MediaStore.MediaColumns.RELATIVE_PATH, path)
                                         }
                                     }

@@ -72,6 +145,91 @@ class SubjectSegmenterHelper(private val context: Context) {
                                     } else {
                                         continuation.resume(null)
                                     }
+                                } else {
+                                    continuation.resume(null)
+                                }
+                            }
+                    .addOnFailureListener { e ->
+                        continuation.resumeWithException(e)
+                    }
+                    .addOnCompleteListener {
+                        segmenter.close()
+                    }
+            } catch (e: Exception) {
+                continuation.resumeWithException(e)
+            }
+        }
+    }
+
+    suspend fun segmentAndSave(inputUri: Uri, animalId: String, orientation: String, subFolder: String? = null): Uri? {
+        return suspendCancellableCoroutine { continuation ->
+            try {
+                val image = InputImage.fromFilePath(context, inputUri)
+                val options = SubjectSegmenterOptions.Builder()
+                    .enableMultipleSubjects(
+                        SubjectSegmenterOptions.SubjectResultOptions.Builder()
+                            .enableSubjectBitmap()
+                            .build()
+                    )
+                    .build()
+                val segmenter = SubjectSegmentation.getClient(options)
+
+                segmenter.process(image)
+                    .addOnSuccessListener { result ->
+                        val subjects = result.subjects
+                        if (subjects.isNotEmpty()) {
+                            // Find the largest subject (assuming it's the one in front/main subject)
+                            val mainSubject = subjects.maxByOrNull { it.width * it.height }
+
+                            if (mainSubject != null && mainSubject.bitmap != null) {
+                                try {
+                                    val resultBitmap = Bitmap.createBitmap(
+                                        image.width,
+                                        image.height,
+                                        Bitmap.Config.ARGB_8888
+                                    )
+                                    val canvas = Canvas(resultBitmap)
+                                    canvas.drawColor(Color.BLACK)
+
+                                    val subjectBitmap = mainSubject.bitmap!!
+                                    canvas.drawBitmap(
+                                        subjectBitmap,
+                                        mainSubject.startX.toFloat(),
+                                        mainSubject.startY.toFloat(),
+                                        null
+                                    )
+
+                                    val filename = "${animalId}_${orientation}_segmented.jpg"
+
+                                    val contentValues = ContentValues().apply {
+                                        put(MediaStore.MediaColumns.DISPLAY_NAME, filename)
+                                        put(MediaStore.MediaColumns.MIME_TYPE, "image/jpeg")
+                                        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+                                            val path = if (subFolder != null) "Pictures/LivingAI/$animalId/$subFolder" else "Pictures/LivingAI/$animalId"
+                                            put(MediaStore.MediaColumns.RELATIVE_PATH, path)
+                                        }
+                                    }
+
+                                    val uri = context.contentResolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, contentValues)
+
+                                    if (uri != null) {
+                                        val outputStream: OutputStream? = context.contentResolver.openOutputStream(uri)
+                                        outputStream?.use { out ->
+                                            resultBitmap.compress(Bitmap.CompressFormat.JPEG, 100, out)
+                                        }
+                                        continuation.resume(uri)
+                                    } else {
+                                        continuation.resume(null)
+                                    }
+                                } catch (e: Exception) {
+                                    continuation.resumeWithException(e)
+                                }
+                            } else {
+                                continuation.resume(null)
+                            }
+                        } else {
+                            continuation.resume(null)
+                        }
                     }
                     .addOnFailureListener { e ->
                         continuation.resumeWithException(e)
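
As a reference for callers, the reshaped helper can be driven from a coroutine roughly as below; the call site, animal id, and orientation values are placeholders, only the SubjectSegmenterHelper signatures come from the diff above.

    import android.graphics.Bitmap
    import android.net.Uri
    import com.example.livingai.domain.ml.SubjectSegmenterHelper

    // Hypothetical call site for the new API surface.
    suspend fun runSegmentation(helper: SubjectSegmenterHelper, photo: Bitmap, photoUri: Uri) {
        // In-memory variant: largest detected subject composited onto a black background, or null.
        val preview: Bitmap? = helper.segmentToBitmap(photo)

        // Saving variants: on Android 10+ the JPEG lands under
        // Pictures/LivingAI/<animalId>[/<subFolder>] and the MediaStore Uri is returned.
        val fromBitmap: Uri? = helper.segmentAndSave(photo, animalId = "A123", orientation = "front", subFolder = "Segmented images")
        val fromUri: Uri? = helper.segmentAndSave(photoUri, animalId = "A123", orientation = "front")
    }
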
@@ -13,5 +13,6 @@ data class AnimalDetails(
     val reproductiveStatus: String,
     val description: String,
     val images: List<String>,
-    val video: String
+    val video: String,
+    val segmentedImages: List<String> = emptyList()
 )

@@ -38,7 +38,8 @@ data class DetectionResult(
     val animalBounds: RectF?,
     val referenceObjects: List<ReferenceObject>,
     val label: String? = null,
-    val confidence: Float = 0f
+    val confidence: Float = 0f,
+    val segmentationMask: ByteArray? = null
 ) : AnalysisResult

 data class PoseResult(
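
A side note on DetectionResult gaining a ByteArray property: Kotlin data classes compare array properties by reference in the generated equals/hashCode, so two results carrying identical masks will not compare equal unless equals/hashCode are overridden with contentEquals/contentHashCode. A minimal illustration with a stand-in class (not the project's DetectionResult):

    // Stand-in type for illustration only.
    data class MaskHolder(val label: String?, val segmentationMask: ByteArray? = null)

    fun main() {
        val a = MaskHolder("cow", byteArrayOf(0, 1, 1, 0))
        val b = MaskHolder("cow", byteArrayOf(0, 1, 1, 0))
        println(a == b)                                               // false: the arrays are compared by reference
        println(a.segmentationMask contentEquals b.segmentationMask)  // true: element-wise comparison
    }
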
@@ -100,7 +100,6 @@ fun AddProfileScreen(
     val ageFocus = remember { FocusRequester() }
     val milkYieldFocus = remember { FocusRequester() }
     val calvingNumberFocus = remember { FocusRequester() }
-    val reproductiveStatusFocus = remember { FocusRequester() } // Probably not useful for RadioGroup but good for consistency

     // Auto-focus logic on error
     LaunchedEffect(speciesError, breedError, ageError, milkYieldError, calvingNumberError, reproductiveStatusError) {
@@ -60,6 +60,9 @@ class AddProfileViewModel(
     private val _videoUri = mutableStateOf<String?>(null)
     val videoUri: State<String?> = _videoUri

+    // State for segmented images
+    val segmentedImages = mutableListOf<String>()
+
     fun loadAnimal(animalId: String?) {
         if (animalId == null) {
             val newId = IdGenerator.generateAnimalId()
@@ -77,6 +80,7 @@ class AddProfileViewModel(
             clearErrors()

             photos.clear()
+            segmentedImages.clear()
             _videoUri.value = null
         } else {
             _currentAnimalId.value = animalId
@@ -96,6 +100,9 @@ class AddProfileViewModel(

                 // Populate photos
                 photos.clear()
+                segmentedImages.clear()
+                segmentedImages.addAll(details.segmentedImages)
+
                 // Process images on IO thread as it may involve DB queries
                 withContext(dispatchers.io) {
                     val photoMap = mutableMapOf<String, String>()
@@ -104,39 +111,30 @@ class AddProfileViewModel(
                         val filename = getFileName(uri) ?: path.substringAfterLast('/')
                         val nameWithoutExt = filename.substringBeforeLast('.')

+                        // Skip segmented images for the main thumbnails
+                        if (nameWithoutExt.contains("segmented", ignoreCase = true)) {
+                            return@forEach
+                        }
+
                         // Find orientation in filename
                         var foundOrientation: String? = null
                         for (o in Constants.silhouetteList) {
-                            // Check if filename contains the orientation string
-                            // We use ignoreCase just in case, though Constants are lowercase
                             if (nameWithoutExt.contains(o, ignoreCase = true)) {
-                                // If we found a match, we verify it's not a substring of another word if possible,
-                                // but here the orientations are quite distinct (front, back, left, right, angleview).
-                                // To be safer, we could check for delimiters, but usually containment is enough for now.
                                 foundOrientation = o
-                                // Prioritize exact matches or longer matches if necessary?
-                                // "left" is in "leftangle". "leftangle" should be matched first if we iterate in order?
-                                // Constants list: front, back, left, right, leftangle...
-                                // If file is "leftangle", it matches "left".
-                                // We should probably check longer keys first or exact match between delimiters.
                             }
                         }

-                        // Better approach: Split by underscore and check exact match against list
                         val parts = nameWithoutExt.split('_')
                         val matchingPart = parts.find { part ->
                             Constants.silhouetteList.any { it.equals(part, ignoreCase = true) }
                         }

                         if (matchingPart != null) {
-                            // Normalize to the key in Constants (lowercase)
                             val key = Constants.silhouetteList.find { it.equals(matchingPart, ignoreCase = true) }
                             if (key != null) {
                                 photoMap[key] = path
                             }
                         } else {
-                            // Fallback to substring search if underscore splitting fails (e.g. if naming changed)
-                            // We sort by length descending so "leftangle" is checked before "left"
                             val sortedOrientations = Constants.silhouetteList.sortedByDescending { it.length }
                             val match = sortedOrientations.find { nameWithoutExt.contains(it, ignoreCase = true) }
                             if (match != null) {
@@ -194,6 +192,12 @@ class AddProfileViewModel(
         photos[orientation] = uri
     }

+    fun addSegmentedImage(uri: String) {
+        if (!segmentedImages.contains(uri)) {
+            segmentedImages.add(uri)
+        }
+    }
+
     fun setVideo(uri: String) {
         _videoUri.value = uri
     }
@@ -265,6 +269,7 @@ class AddProfileViewModel(
             description = description.value,
             images = photos.values.toList(),
             video = _videoUri.value ?: "",
+            segmentedImages = segmentedImages.toList(),
             name = "", sex = "", weight = 0
         )
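
The fallback branch above sorts the orientation keys by descending length before substring matching, which is what keeps "left" from shadowing "leftangle" (the case the removed comments worried about). A tiny standalone check; the list and file name are illustrative:

    fun main() {
        val orientations = listOf("front", "back", "left", "right", "leftangle")
        val name = "A123_leftangle_1700000000000"

        val naive = orientations.find { name.contains(it, ignoreCase = true) }
        val byLength = orientations.sortedByDescending { it.length }
            .find { name.contains(it, ignoreCase = true) }

        println(naive)     // left      -- tested first and matches inside "leftangle"
        println(byLength)  // leftangle -- longest keys first avoids the substring collision
    }
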
@@ -250,15 +250,10 @@ fun ActiveCameraScreen(
                     contentScale = ContentScale.Fit
                 )
             }
-
         if (silhouetteData != null) {
-
             if (silhouetteData.signedMask?.debugBitmap != null) {
-
                 val bbox = silhouetteData.boundingBox
-
                 Box(modifier = Modifier.fillMaxSize()) {
-
                     // Bounding box outline (same coordinate system)
                     Canvas(modifier = Modifier.fillMaxSize()) {
                         drawRect(
@@ -270,10 +265,22 @@ fun ActiveCameraScreen(
                         }
                     }
                 }
+            }
+        }
+
+        uiState.lastDetectionResult?.let { detection ->
+            if (detection.segmentationMask != null) {
+                SegmentationOverlay(
+                    mask = detection.segmentationMask!!,
+                    animalBounds = detection.animalBounds,
+                    imageWidth = analysisImageSize.width.toInt(),
+                    imageHeight = analysisImageSize.height.toInt(),
+                    modifier = Modifier.fillMaxSize()
+                )
             }
         }


         // Overlays
         uiState.currentInstruction?.let { instruction ->
             InstructionOverlay(
@@ -295,6 +302,74 @@ fun ActiveCameraScreen(
     }
 }

+@Composable
+fun SegmentationOverlay(
+    mask: ByteArray,
+    animalBounds: RectF?,
+    imageWidth: Int,
+    imageHeight: Int,
+    modifier: Modifier
+) {
+    if (animalBounds == null) return
+
+    Canvas(modifier = modifier) {
+        val canvasWidth = size.width
+        val canvasHeight = size.height
+
+        val widthRatio = canvasWidth / imageWidth
+        val heightRatio = canvasHeight / imageHeight
+        val scale = max(widthRatio, heightRatio)
+
+        val offsetX = (canvasWidth - imageWidth * scale) / 2f
+        val offsetY = (canvasHeight - imageHeight * scale) / 2f
+
+        // The mask corresponds to the cropped and resized area of the silhouette,
+        // but here we are receiving the raw mask from MockPoseAnalyzer which seems to match the resized bitmap
+        // used for comparison (silhouette.croppedBitmap size).
+        // However, MockPoseAnalyzer.segment returns a mask of size `bitmap.width * bitmap.height`
+        // where `bitmap` is the resized crop.
+
+        // Wait, looking at MockPoseAnalyzer.analyze:
+        // 1. crops image to animalBounds
+        // 2. resizes crop to silhouette.croppedBitmap dimensions
+        // 3. segments resized crop -> mask
+
+        // So the mask is small (e.g. 100x100). We need to draw it scaled up to the animalBounds on screen.
+
+        val boxLeft = animalBounds.left * scale + offsetX
+        val boxTop = animalBounds.top * scale + offsetY
+        val boxWidth = animalBounds.width() * scale
+        val boxHeight = animalBounds.height() * scale
+
+        // We need to know the dimensions of the mask grid to draw it properly.
+        // Since we don't pass dimensions, we can infer if it's square or pass it.
+        // Assuming square for simplicity as per SilhouetteManager usually?
+        // Actually, we can just draw points.
+
+        val maskSize = kotlin.math.sqrt(mask.size.toDouble()).toInt()
+        // Ideally we should pass width/height of the mask.
+        // For now let's assume the mask matches the aspect ratio of the box or is just a grid.
+
+        if (maskSize > 0) {
+            val pixelW = boxWidth / maskSize
+            val pixelH = boxHeight / maskSize
+
+            for (y in 0 until maskSize) {
+                for (x in 0 until maskSize) {
+                    val index = y * maskSize + x
+                    if (index < mask.size && mask[index] == 1.toByte()) {
+                        drawRect(
+                            color = Color.Green.copy(alpha = 0.5f),
+                            topLeft = Offset(boxLeft + x * pixelW, boxTop + y * pixelH),
+                            size = Size(pixelW, pixelH)
+                        )
+                    }
+                }
+            }
+        }
+    }
+}
+
 @Composable
 fun InstructionOverlay(
     instruction: Instruction,
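
SegmentationOverlay maps image coordinates to the canvas with scale = max(widthRatio, heightRatio) and centered offsets, i.e. a center-crop style fit where one axis may overflow. A quick numeric check of that mapping, with made-up sizes:

    import kotlin.math.max

    // Made-up sizes: a 480x640 analysis image drawn on a 1080x1920 canvas.
    fun main() {
        val imageWidth = 480f; val imageHeight = 640f
        val canvasWidth = 1080f; val canvasHeight = 1920f

        val scale = max(canvasWidth / imageWidth, canvasHeight / imageHeight)  // max(2.25, 3.0) = 3.0
        val offsetX = (canvasWidth - imageWidth * scale) / 2f                  // (1080 - 1440) / 2 = -180
        val offsetY = (canvasHeight - imageHeight * scale) / 2f                // (1920 - 1920) / 2 = 0

        // A bounding-box corner at (100, 200) in image pixels lands here on the canvas:
        println(100f * scale + offsetX)  // 120.0
        println(200f * scale + offsetY)  // 600.0
    }
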
@@ -1,5 +1,7 @@
 package com.example.livingai.pages.camera

+import android.graphics.Bitmap
+import android.graphics.BitmapFactory
 import android.net.Uri
 import androidx.compose.foundation.Canvas
 import androidx.compose.foundation.Image
@@ -34,7 +36,9 @@ import androidx.exifinterface.media.ExifInterface
 import coil.compose.rememberAsyncImagePainter
 import com.example.livingai.domain.ml.SubjectSegmenterHelper
 import com.example.livingai.ui.theme.LivingAITheme
+import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.launch
+import kotlinx.coroutines.withContext
 import kotlin.math.min

 @Composable
@@ -44,6 +48,9 @@ fun ViewImageScreen(
     showAccept: Boolean,
     showBack: Boolean,
     showSegment: Boolean = false,
+    isSegmented: Boolean = false,
+    animalId: String,
+    orientation: String? = null,
     onRetake: () -> Unit,
     onAccept: (String) -> Unit,
     onSegmented: (String) -> Unit = {},
@@ -55,6 +62,10 @@ fun ViewImageScreen(
     var imageWidth by remember { mutableStateOf(0f) }
     var imageHeight by remember { mutableStateOf(0f) }

+    // Check if this image is likely a segmented result based on the filename or uri content if available.
+    // However, we now have an explicit isSegmented flag which is more reliable for navigation flow
+    val isSegmentedResult = isSegmented || imageUri.contains("segmented")
+
     val displayedUri = Uri.parse(imageUri)
     var isSegmenting by remember { mutableStateOf(false) }

@@ -105,8 +116,8 @@ fun ViewImageScreen(
             alignment = Alignment.Center
         )

-        // Draw Bounding Box if available
-        if (boundingBox != null && imageWidth > 0 && imageHeight > 0) {
+        // Draw Bounding Box if available AND NOT segmented
+        if (!isSegmentedResult && boundingBox != null && imageWidth > 0 && imageHeight > 0) {
             Canvas(modifier = Modifier.fillMaxSize()) {
                 val canvasWidth = size.width
                 val canvasHeight = size.height
@@ -165,7 +176,48 @@ fun ViewImageScreen(
             Button(onClick = {
                 scope.launch {
                     isSegmenting = true
-                    val resultUri = segmenterHelper.segmentAndSave(displayedUri)
+                    // Parse bounding box to crop
+                    var cropBitmap: Bitmap? = null
+
+                    if (boundingBox != null) {
+                        try {
+                            withContext(Dispatchers.IO) {
+                                context.contentResolver.openInputStream(displayedUri)?.use { stream ->
+                                    val original = BitmapFactory.decodeStream(stream)
+                                    val parts = boundingBox!!.split(",")
+                                    if (parts.size == 4 && original != null) {
+                                        val left = parts[0].toFloatOrNull()?.toInt() ?: 0
+                                        val top = parts[1].toFloatOrNull()?.toInt() ?: 0
+                                        val right = parts[2].toFloatOrNull()?.toInt() ?: 0
+                                        val bottom = parts[3].toFloatOrNull()?.toInt() ?: 0
+
+                                        val w = right - left
+                                        val h = bottom - top
+
+                                        if (w > 0 && h > 0 && left >= 0 && top >= 0 &&
+                                            left + w <= original.width && top + h <= original.height) {
+                                            cropBitmap = Bitmap.createBitmap(original, left, top, w, h)
+                                        } else {
+                                            cropBitmap = original
+                                        }
+                                    } else {
+                                        cropBitmap = original
+                                    }
+                                }
+                            }
+                        } catch (e: Exception) {
+                            e.printStackTrace()
+                        }
+                    }
+
+                    val bitmapToSegment = cropBitmap
+                    val resultUri = if (bitmapToSegment != null) {
+                        segmenterHelper.segmentAndSave(bitmapToSegment, animalId, orientation ?: "unknown", "Segmented images")
+                    } else {
+                        segmenterHelper.segmentAndSave(displayedUri, animalId, orientation ?: "unknown", "Segmented images")
+                    }
+
                     if (resultUri != null) {
                         onSegmented(resultUri.toString())
                     }
@@ -182,7 +234,6 @@ fun ViewImageScreen(
                 }
             }

-            // Show Back if explicitly requested OR if Accept is not shown (to avoid empty state or getting stuck)
             if (showBack || !showAccept) {
                 Button(onClick = onBack) {
                     Text("Back")
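
The crop path assumes boundingBox is a single "left,top,right,bottom" string of numbers in the original image's pixel space (the parser expects exactly four comma-separated parts). A small sketch of producing and consuming that format; the encode helper is an assumption for illustration, not code from the repo:

    // Assumed encoding: "left,top,right,bottom", matching what the ViewImageScreen parser expects.
    fun encodeBounds(left: Float, top: Float, right: Float, bottom: Float): String =
        listOf(left, top, right, bottom).joinToString(",")

    fun decodeBounds(s: String): List<Int>? {
        val parts = s.split(",")
        if (parts.size != 4) return null
        val values = parts.map { it.toFloatOrNull()?.toInt() }
        return if (values.any { it == null }) null else values.filterNotNull()
    }

    fun main() {
        val encoded = encodeBounds(120.5f, 80.0f, 900.25f, 640.0f)
        println(encoded)               // 120.5,80.0,900.25,640.0
        println(decodeBounds(encoded)) // [120, 80, 900, 640]
    }
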
@@ -63,6 +63,9 @@ fun NavGraph(
             val newImageOrientation = backStackEntry.savedStateHandle.get<String>("newImageOrientation")
             val newVideoUri = backStackEntry.savedStateHandle.get<String>("newVideoUri")

+            // We listen for segmented image here too
+            val newSegmentedUri = backStackEntry.savedStateHandle.get<String>("newSegmentedUri")
+
             LaunchedEffect(newImageUri, newImageOrientation) {
                 if (newImageUri != null && newImageOrientation != null) {
                     viewModel.addPhoto(newImageOrientation, newImageUri)
@@ -70,6 +73,14 @@ fun NavGraph(
                     backStackEntry.savedStateHandle.remove<String>("newImageOrientation")
                 }
             }

+            LaunchedEffect(newSegmentedUri) {
+                if (newSegmentedUri != null) {
+                    viewModel.addSegmentedImage(newSegmentedUri)
+                    backStackEntry.savedStateHandle.remove<String>("newSegmentedUri")
+                }
+            }
+
             LaunchedEffect(newVideoUri) {
                 if (newVideoUri != null) {
                     viewModel.setVideo(newVideoUri)
@@ -145,14 +156,23 @@ fun NavGraph(
                 showAccept = args.showAccept,
                 showBack = args.showBack,
                 showSegment = args.showSegment,
+                animalId = args.animalId,
+                orientation = args.orientation,
                 onRetake = {
                     navController.popBackStack()
                     // navController.navigate(Route.CameraScreen(...))
                 },
                 onAccept = { uri ->
+                    // If it's a segmented result, add to segmented list
+                    if (args.imageUri.contains("segmented")) {
+                        navController.getBackStackEntry<Route.AddProfileScreen>().savedStateHandle["newSegmentedUri"] = uri
+                        navController.popBackStack<Route.AddProfileScreen>(inclusive = false)
+                    } else {
+                        // Normal image
                         navController.getBackStackEntry<Route.AddProfileScreen>().savedStateHandle["newImageUri"] = uri
                         navController.getBackStackEntry<Route.AddProfileScreen>().savedStateHandle["newImageOrientation"] = args.orientation
                         navController.popBackStack<Route.AddProfileScreen>(inclusive = false)
+                    }
                 },
                 onSegmented = { segmentedUri ->
                     navController.navigate(Route.ViewImageScreen(
@@ -32,6 +32,7 @@ sealed class Route {
         val showAccept: Boolean = false,
         val showBack: Boolean = false,
         val showSegment: Boolean = false,
+        val isSegmented: Boolean = false,
         val animalId: String
     ) : Route()
     @Serializable