Code review and fixes

Home, Add Profile, and Camera Capture screens
This commit is contained in:
SaiD 2025-12-20 13:37:33 +05:30
parent d5902cba08
commit a6ea1d5ce0
16 changed files with 359 additions and 571 deletions

View File

@ -43,7 +43,6 @@ class MainActivity : ComponentActivity() {
val settings by appDataUseCases.getSettings().collectAsState(initial = null)
val context = LocalContext.current
// Update locale and provide it through CompositionLocalProvider
val localizedContext = settings?.let {
LocaleHelper.applyLocale(context, it.language)
} ?: context
@ -65,13 +64,7 @@ class MainActivity : ComponentActivity() {
)
Box(modifier = Modifier.background(color = MaterialTheme.colorScheme.background)) {
val startDestination = viewModel.startDestination.value
// Ensure startDestination is not null before rendering NavGraph
if (startDestination != null) {
NavGraph(startDestination = startDestination)
} else {
// Optional: Show a loading indicator if startDestination is null
// for an extended period, though splash screen should handle it.
}
NavGraph(startDestination = startDestination)
}
}
}
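With the null branch removed above, NavGraph now receives startDestination directly on the first composition, which presumes the ViewModel exposes it as a non-null value from the start. A minimal sketch of that shape, assuming Compose state and the project's Route type (the default value here is illustrative):

import androidx.compose.runtime.State
import androidx.compose.runtime.mutableStateOf
import androidx.lifecycle.ViewModel

class StartDestinationSketch : ViewModel() {
    // Starting from a non-null default lets NavGraph compose immediately;
    // the splash condition covers the brief window before readAppEntry resolves.
    private val _startDestination = mutableStateOf<Route>(Route.HomeNavigation)
    val startDestination: State<Route> = _startDestination
}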

View File

@ -247,9 +247,17 @@ class CSVDataSource(
calvingNumber = row[INDEX_CALVING].toIntOrNull() ?: 0,
reproductiveStatus = row[INDEX_REPRO],
description = row[INDEX_DESC],
images = row[INDEX_IMAGES].split(";").filter { it.isNotBlank() },
images = row[INDEX_IMAGES].split(';').asSequence().filter { it.isNotBlank() }
.map { pair ->
val (k, v) = pair.split('=', limit = 2)
k to v
}.toMap(),
video = row[INDEX_VIDEO],
segmentedImages = row.getOrNull(INDEX_SEGMENTED_IMAGES)?.split(";")?.filter { it.isNotBlank() } ?: emptyList()
segmentedImages = row.getOrNull(INDEX_SEGMENTED_IMAGES)?.split(';')?.asSequence()?.filter { it.isNotBlank() }
?.map { pair ->
val (k, v) = pair.split('=', limit = 2)
k to v
}?.toMap() ?: emptyMap()
)
}
@ -313,9 +321,9 @@ class CSVDataSource(
row[INDEX_CALVING] = d.calvingNumber.toString()
row[INDEX_REPRO] = d.reproductiveStatus
row[INDEX_DESC] = d.description
row[INDEX_IMAGES] = d.images.joinToString(";")
row[INDEX_IMAGES] = d.images.entries.joinToString(";") { (k, v) -> "$k=$v" }
row[INDEX_VIDEO] = d.video
row[INDEX_SEGMENTED_IMAGES] = d.segmentedImages.joinToString(";")
row[INDEX_SEGMENTED_IMAGES] = d.segmentedImages.entries.joinToString(";") { (k, v) -> "$k=$v" }
return row
}
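The images and segmentedImages columns are now stored as "orientation=uri" pairs joined with ';'. A standalone round-trip sketch of that encoding, mirroring the split/join logic above (note that split('=', limit = 2) assumes every non-blank pair actually contains an '='):

fun encodeImageMap(images: Map<String, String>): String =
    images.entries.joinToString(";") { (k, v) -> "$k=$v" }

fun decodeImageMap(cell: String): Map<String, String> =
    cell.split(';')
        .filter { it.isNotBlank() }
        .associate { pair ->
            val (k, v) = pair.split('=', limit = 2) // assumes well-formed "key=value"
            k to v
        }

fun main() {
    val images = mapOf("side" to "content://media/1", "front" to "content://media/2")
    val cell = encodeImageMap(images) // "side=content://media/1;front=content://media/2"
    check(decodeImageMap(cell) == images) // round-trips
}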

View File

@ -16,13 +16,5 @@ class AnimalDetailsRepositoryImpl(
dataSource.setAnimalDetails(animalDetails)
}
override suspend fun deleteAnimalDetails(id: String) {
// Currently only full profile deletion is exposed by DataSource as per request
// but we can call that if needed, or just leave it as no-op if details deletion is specific
// Assuming for now it deletes the profile or we might need to add specific delete to DataSource
// But the prompt said "DataSource should have get, set and delete... get and delete based on a string which will be id"
// And "deleteAnimalProfile - takes an Id deletes that animals complete profile"
// So strictly for details, maybe we don't have a specific delete or we just don't impl it yet.
// However, to satisfy the interface:
}
override suspend fun deleteAnimalDetails(id: String) { }
}

View File

@ -181,7 +181,7 @@ val appModule = module {
viewModel { HomeViewModel(get()) }
viewModel { OnBoardingViewModel(get()) }
viewModel { (savedStateHandle: androidx.lifecycle.SavedStateHandle?) ->
AddProfileViewModel(get(), get(), get(), androidContext(), savedStateHandle)
AddProfileViewModel(get(), get(), androidContext(), savedStateHandle)
}
viewModel { ListingsViewModel(get()) }
viewModel { SettingsViewModel(get()) }

View File

@ -8,7 +8,6 @@ import android.graphics.Color
import android.net.Uri
import android.os.Build
import android.provider.MediaStore
import android.provider.OpenableColumns
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.segmentation.subject.SubjectSegmentation
import com.google.mlkit.vision.segmentation.subject.SubjectSegmenterOptions
@ -19,254 +18,119 @@ import kotlin.coroutines.resumeWithException
class SubjectSegmenterHelper(private val context: Context) {
suspend fun segmentToBitmap(inputBitmap: Bitmap): Bitmap? {
return suspendCancellableCoroutine { continuation ->
try {
val image = InputImage.fromBitmap(inputBitmap, 0)
val options = SubjectSegmenterOptions.Builder()
.enableMultipleSubjects(
SubjectSegmenterOptions.SubjectResultOptions.Builder()
.enableSubjectBitmap()
.build()
)
.build()
val segmenter = SubjectSegmentation.getClient(options)
private suspend fun segmentInternal(image: InputImage): Bitmap? =
suspendCancellableCoroutine { continuation ->
segmenter.process(image)
.addOnSuccessListener { result ->
val subjects = result.subjects
if (subjects.isNotEmpty()) {
// Find the largest subject
val mainSubject = subjects.maxByOrNull { it.width * it.height }
val options = SubjectSegmenterOptions.Builder()
.enableMultipleSubjects(
SubjectSegmenterOptions.SubjectResultOptions.Builder()
.enableSubjectBitmap()
.build()
)
.build()
if (mainSubject != null && mainSubject.bitmap != null) {
try {
val resultBitmap = Bitmap.createBitmap(
image.width,
image.height,
Bitmap.Config.ARGB_8888
)
val canvas = Canvas(resultBitmap)
canvas.drawColor(Color.BLACK)
val segmenter = SubjectSegmentation.getClient(options)
val subjectBitmap = mainSubject.bitmap!!
canvas.drawBitmap(
subjectBitmap,
mainSubject.startX.toFloat(),
mainSubject.startY.toFloat(),
null
)
continuation.resume(resultBitmap)
} catch (e: Exception) {
continuation.resumeWithException(e)
}
} else {
continuation.resume(null)
}
} else {
continuation.resume(null)
}
segmenter.process(image)
.addOnSuccessListener { result ->
val subject = result.subjects
.maxByOrNull { it.width * it.height }
if (subject?.bitmap == null) {
continuation.resume(null)
return@addOnSuccessListener
}
.addOnFailureListener { e ->
try {
val output = Bitmap.createBitmap(
image.width,
image.height,
Bitmap.Config.ARGB_8888
)
val canvas = Canvas(output)
canvas.drawColor(Color.BLACK)
canvas.drawBitmap(
subject.bitmap!!,
subject.startX.toFloat(),
subject.startY.toFloat(),
null
)
continuation.resume(output)
} catch (e: Exception) {
continuation.resumeWithException(e)
}
.addOnCompleteListener {
segmenter.close()
}
} catch (e: Exception) {
continuation.resumeWithException(e)
}
}
}
suspend fun segmentAndSave(inputBitmap: Bitmap, animalId: String, orientation: String, subFolder: String? = null): Uri? {
return suspendCancellableCoroutine { continuation ->
try {
val image = InputImage.fromBitmap(inputBitmap, 0)
val options = SubjectSegmenterOptions.Builder()
.enableMultipleSubjects(
SubjectSegmenterOptions.SubjectResultOptions.Builder()
.enableSubjectBitmap()
.build()
)
.build()
val segmenter = SubjectSegmentation.getClient(options)
segmenter.process(image)
.addOnSuccessListener { result ->
val subjects = result.subjects
if (subjects.isNotEmpty()) {
// Find the largest subject
val mainSubject = subjects.maxByOrNull { it.width * it.height }
if (mainSubject != null && mainSubject.bitmap != null) {
try {
val resultBitmap = Bitmap.createBitmap(
image.width,
image.height,
Bitmap.Config.ARGB_8888
)
val canvas = Canvas(resultBitmap)
canvas.drawColor(Color.BLACK)
val subjectBitmap = mainSubject.bitmap!!
canvas.drawBitmap(
subjectBitmap,
mainSubject.startX.toFloat(),
mainSubject.startY.toFloat(),
null
)
val filename = "${animalId}_${orientation}_segmented.jpg"
val contentValues = ContentValues().apply {
put(MediaStore.MediaColumns.DISPLAY_NAME, filename)
put(MediaStore.MediaColumns.MIME_TYPE, "image/jpeg")
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
val path = if (subFolder != null) "Pictures/LivingAI/$animalId/$subFolder" else "Pictures/LivingAI/$animalId"
put(MediaStore.MediaColumns.RELATIVE_PATH, path)
}
}
val uri = context.contentResolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, contentValues)
if (uri != null) {
val outputStream: OutputStream? = context.contentResolver.openOutputStream(uri)
outputStream?.use { out ->
resultBitmap.compress(Bitmap.CompressFormat.JPEG, 100, out)
}
continuation.resume(uri)
} else {
continuation.resume(null)
}
} catch (e: Exception) {
continuation.resumeWithException(e)
}
} else {
continuation.resume(null)
}
} else {
continuation.resume(null)
}
}
.addOnFailureListener { e ->
continuation.resumeWithException(e)
}
.addOnCompleteListener {
segmenter.close()
}
} catch (e: Exception) {
continuation.resumeWithException(e)
}
}
}
suspend fun segmentAndSave(inputUri: Uri, animalId: String, orientation: String, subFolder: String? = null): Uri? {
return suspendCancellableCoroutine { continuation ->
try {
val image = InputImage.fromFilePath(context, inputUri)
val options = SubjectSegmenterOptions.Builder()
.enableMultipleSubjects(
SubjectSegmenterOptions.SubjectResultOptions.Builder()
.enableSubjectBitmap()
.build()
)
.build()
val segmenter = SubjectSegmentation.getClient(options)
segmenter.process(image)
.addOnSuccessListener { result ->
val subjects = result.subjects
if (subjects.isNotEmpty()) {
// Find the largest subject (assuming it's the one in front/main subject)
val mainSubject = subjects.maxByOrNull { it.width * it.height }
if (mainSubject != null && mainSubject.bitmap != null) {
try {
val resultBitmap = Bitmap.createBitmap(
image.width,
image.height,
Bitmap.Config.ARGB_8888
)
val canvas = Canvas(resultBitmap)
canvas.drawColor(Color.BLACK)
val subjectBitmap = mainSubject.bitmap!!
canvas.drawBitmap(
subjectBitmap,
mainSubject.startX.toFloat(),
mainSubject.startY.toFloat(),
null
)
val filename = "${animalId}_${orientation}_segmented.jpg"
val contentValues = ContentValues().apply {
put(MediaStore.MediaColumns.DISPLAY_NAME, filename)
put(MediaStore.MediaColumns.MIME_TYPE, "image/jpeg")
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
val path = if (subFolder != null) "Pictures/LivingAI/$animalId/$subFolder" else "Pictures/LivingAI/$animalId"
put(MediaStore.MediaColumns.RELATIVE_PATH, path)
}
}
val uri = context.contentResolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, contentValues)
if (uri != null) {
val outputStream: OutputStream? = context.contentResolver.openOutputStream(uri)
outputStream?.use { out ->
resultBitmap.compress(Bitmap.CompressFormat.JPEG, 100, out)
}
continuation.resume(uri)
} else {
continuation.resume(null)
}
} catch (e: Exception) {
continuation.resumeWithException(e)
}
} else {
continuation.resume(null)
}
} else {
continuation.resume(null)
}
}
.addOnFailureListener { e ->
continuation.resumeWithException(e)
}
.addOnCompleteListener {
segmenter.close()
}
} catch (e: Exception) {
continuation.resumeWithException(e)
}
}
}
private fun getFileName(uri: Uri): String? {
var result: String? = null
if (uri.scheme == "content") {
val cursor = context.contentResolver.query(uri, null, null, null, null)
try {
if (cursor != null && cursor.moveToFirst()) {
val index = cursor.getColumnIndex(OpenableColumns.DISPLAY_NAME)
if (index >= 0) {
result = cursor.getString(index)
}
}
} catch (e: Exception) {
// ignore
} finally {
cursor?.close()
.addOnFailureListener { e ->
continuation.resumeWithException(e)
}
.addOnCompleteListener {
segmenter.close()
}
}
suspend fun segmentToBitmap(inputBitmap: Bitmap): Bitmap? {
val image = InputImage.fromBitmap(inputBitmap, 0)
return segmentInternal(image)
}
suspend fun segmentAndSave(
inputBitmap: Bitmap,
animalId: String,
orientation: String,
subFolder: String? = null
): Uri? {
val image = InputImage.fromBitmap(inputBitmap, 0)
val bitmap = segmentInternal(image) ?: return null
return saveBitmap(bitmap, animalId, orientation, subFolder)
}
suspend fun segmentAndSave(
inputUri: Uri,
animalId: String,
orientation: String,
subFolder: String? = null
): Uri? {
val image = InputImage.fromFilePath(context, inputUri)
val bitmap = segmentInternal(image) ?: return null
return saveBitmap(bitmap, animalId, orientation, subFolder)
}
private fun saveBitmap(
bitmap: Bitmap,
animalId: String,
orientation: String,
subFolder: String?
): Uri? {
val filename = "${animalId}_${orientation}_segmented.jpg"
val values = ContentValues().apply {
put(MediaStore.MediaColumns.DISPLAY_NAME, filename)
put(MediaStore.MediaColumns.MIME_TYPE, "image/jpeg")
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
val path =
if (subFolder != null)
"Pictures/LivingAI/$animalId/$subFolder"
else
"Pictures/LivingAI/$animalId"
put(MediaStore.MediaColumns.RELATIVE_PATH, path)
}
}
if (result == null) {
result = uri.path
val cut = result?.lastIndexOf('/')
if (cut != null && cut != -1) {
result = result?.substring(cut + 1)
}
val uri = context.contentResolver.insert(
MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
values
) ?: return null
val outputStream: OutputStream? =
context.contentResolver.openOutputStream(uri)
outputStream?.use { out ->
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, out)
}
return result
return uri
}
}
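After this refactor the public API reduces to segmentToBitmap and two segmentAndSave overloads, all delegating to segmentInternal plus saveBitmap. A hedged usage sketch from a caller's side (assumes the helper is constructed with an Android Context and called from a coroutine; the id and orientation values are illustrative):

import android.net.Uri

// segmentAndSave returns null when no subject is found or the MediaStore insert fails,
// so callers should handle the null case rather than assume a Uri.
suspend fun segmentCapturedPhoto(helper: SubjectSegmenterHelper, photoUri: Uri): Uri? =
    helper.segmentAndSave(
        inputUri = photoUri,
        animalId = "A-0001",            // hypothetical id
        orientation = "side",
        subFolder = "Segmented images"  // matches the folder name used by ViewImageScreen
    )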

View File

@ -12,7 +12,7 @@ data class AnimalDetails(
val calvingNumber: Int,
val reproductiveStatus: String,
val description: String,
val images: List<String>,
val images: Map<String, String>,
val video: String,
val segmentedImages: List<String> = emptyList()
val segmentedImages: Map<String, String>
)

View File

@ -47,6 +47,8 @@ import com.example.livingai.utils.Constants
fun AddProfileScreen(
navController: NavController,
viewModel: AddProfileViewModel,
animalId: String?,
loadEntry: Boolean,
onSave: () -> Unit,
onCancel: () -> Unit,
onTakePhoto: (String) -> Unit,
@ -54,11 +56,12 @@ fun AddProfileScreen(
) {
val context = LocalContext.current
// If opened for edit, attempt to load existing animal details
LaunchedEffect(Unit) {
val existing = viewModel.savedStateHandle?.get<String>("animalId")
val loadEntry = viewModel.savedStateHandle?.get<Boolean>("loadEntry")
if (existing != null && loadEntry == true) viewModel.loadAnimal(existing)
LaunchedEffect(animalId, loadEntry) {
if (loadEntry && animalId != null) {
viewModel.loadAnimal(animalId)
} else {
viewModel.initializeNewProfile()
}
}
val speciesList = stringArrayResource(id = R.array.species_list).toList()
@ -137,7 +140,7 @@ fun AddProfileScreen(
labelRes = R.string.label_species,
options = speciesList,
selected = species,
onSelected = { species = it },
onSelected = viewModel::validateSpeciesInputs,
modifier = Modifier.focusRequester(speciesFocus),
isError = speciesError != null,
supportingText = speciesError
@ -147,7 +150,7 @@ fun AddProfileScreen(
labelRes = R.string.label_breed,
options = breedList,
selected = breed,
onSelected = { breed = it },
onSelected = viewModel::validateBreedInputs,
modifier = Modifier.focusRequester(breedFocus),
isError = breedError != null,
supportingText = breedError
@ -163,7 +166,7 @@ fun AddProfileScreen(
modifier = Modifier
.weight(1f)
.focusRequester(ageFocus),
onValueChange = { age = it },
onValueChange = viewModel::validateAgeInputs,
keyboardType = KeyboardType.Number,
isError = ageError != null,
supportingText = ageError
@ -175,7 +178,7 @@ fun AddProfileScreen(
modifier = Modifier
.weight(1f)
.focusRequester(milkYieldFocus),
onValueChange = { milkYield = it },
onValueChange = viewModel::validateMilkYieldInputs,
keyboardType = KeyboardType.Number,
isError = milkYieldError != null,
supportingText = milkYieldError
@ -188,7 +191,7 @@ fun AddProfileScreen(
modifier = Modifier
.fillMaxWidth()
.focusRequester(calvingNumberFocus),
onValueChange = { calvingNumber = it },
onValueChange = viewModel::validateCalvingInputs,
keyboardType = KeyboardType.Number,
isError = calvingNumberError != null,
supportingText = calvingNumberError
@ -198,7 +201,7 @@ fun AddProfileScreen(
titleRes = R.string.label_reproductive_status,
options = reproList,
selected = reproductiveStatus,
onSelected = { reproductiveStatus = it },
onSelected = viewModel::validateReproductiveStatusInputs,
isError = reproductiveStatusError != null
)
if (reproductiveStatusError != null) {

View File

@ -22,7 +22,6 @@ import kotlinx.coroutines.withContext
class AddProfileViewModel(
private val profileEntryUseCase: ProfileEntryUseCase,
private val getAnimalDetails: GetAnimalDetails,
private val dispatchers: CoroutineDispatchers,
private val context: Context,
val savedStateHandle: SavedStateHandle? = null
@ -57,99 +56,43 @@ class AddProfileViewModel(
// State for photos and video
val photos = mutableStateMapOf<String, String>()
val segmentedImages = mutableStateMapOf<String, String>()
private val _videoUri = mutableStateOf<String?>(null)
val videoUri: State<String?> = _videoUri
// State for segmented images
val segmentedImages = mutableListOf<String>()
fun loadAnimal(animalId: String) {
if (animalId == _currentAnimalId.value) return
fun loadAnimal(animalId: String?) {
if (animalId == null) {
val newId = IdGenerator.generateAnimalId()
_currentAnimalId.value = newId
_animalDetails.value = null
_currentAnimalId.value = animalId
// Reset UI State
species.value = null
breed.value = null
age.value = ""
milkYield.value = ""
calvingNumber.value = ""
reproductiveStatus.value = null
description.value = ""
clearErrors()
profileEntryUseCase.getAnimalDetails(animalId).onEach { details ->
if (details != null) {
_animalDetails.value = details
photos.clear()
segmentedImages.clear()
_videoUri.value = null
} else {
_currentAnimalId.value = animalId
profileEntryUseCase.getAnimalDetails(animalId).onEach { details ->
if (details != null) {
_animalDetails.value = details
// Populate UI State
species.value = details.species.ifBlank { null }
breed.value = details.breed.ifBlank { null }
age.value = if (details.age == 0) "" else details.age.toString()
milkYield.value = if (details.milkYield == 0) "" else details.milkYield.toString()
calvingNumber.value = if (details.calvingNumber == 0) "" else details.calvingNumber.toString()
reproductiveStatus.value = details.reproductiveStatus.ifBlank { null }
description.value = details.description
clearErrors()
// Populate UI State
species.value = details.species.ifBlank { null }
breed.value = details.breed.ifBlank { null }
age.value = if (details.age == 0) "" else details.age.toString()
milkYield.value = if (details.milkYield == 0) "" else details.milkYield.toString()
calvingNumber.value = if (details.calvingNumber == 0) "" else details.calvingNumber.toString()
reproductiveStatus.value = details.reproductiveStatus.ifBlank { null }
description.value = details.description
clearErrors()
photos.clear()
segmentedImages.clear()
// Populate photos
photos.clear()
segmentedImages.clear()
segmentedImages.addAll(details.segmentedImages)
// Process images on IO thread as it may involve DB queries
withContext(dispatchers.io) {
val photoMap = mutableMapOf<String, String>()
details.images.forEach { path ->
val uri = Uri.parse(path)
val filename = getFileName(uri) ?: path.substringAfterLast('/')
val nameWithoutExt = filename.substringBeforeLast('.')
// Skip segmented images for the main thumbnails
if (nameWithoutExt.contains("segmented", ignoreCase = true)) {
return@forEach
}
// Find orientation in filename
var foundOrientation: String? = null
for (o in Constants.silhouetteList) {
if (nameWithoutExt.contains(o, ignoreCase = true)) {
foundOrientation = o
}
}
val parts = nameWithoutExt.split('_')
val matchingPart = parts.find { part ->
Constants.silhouetteList.any { it.equals(part, ignoreCase = true) }
}
if (matchingPart != null) {
val key = Constants.silhouetteList.find { it.equals(matchingPart, ignoreCase = true) }
if (key != null) {
photoMap[key] = path
}
} else {
val sortedOrientations = Constants.silhouetteList.sortedByDescending { it.length }
val match = sortedOrientations.find { nameWithoutExt.contains(it, ignoreCase = true) }
if (match != null) {
photoMap[match] = path
}
}
}
withContext(dispatchers.main) {
photoMap.forEach { (k, v) -> photos[k] = v }
}
withContext(dispatchers.main) {
details.images.entries.forEach { (orientation, path) ->
photos[orientation] = path
}
details.segmentedImages.entries.forEach { (orientation, path) ->
segmentedImages[orientation] = path
}
_videoUri.value = details.video.ifBlank { null }
}
}.launchIn(viewModelScope)
}
_videoUri.value = details.video.ifBlank { null }
}
}.launchIn(viewModelScope)
}
private fun clearErrors() {
@ -161,48 +104,22 @@ class AddProfileViewModel(
reproductiveStatusError.value = null
}
private fun getFileName(uri: Uri): String? {
var result: String? = null
if (uri.scheme == "content") {
val cursor = context.contentResolver.query(uri, null, null, null, null)
try {
if (cursor != null && cursor.moveToFirst()) {
val index = cursor.getColumnIndex(OpenableColumns.DISPLAY_NAME)
if (index >= 0) {
result = cursor.getString(index)
}
}
} catch (e: Exception) {
e.printStackTrace()
} finally {
cursor?.close()
}
}
if (result == null) {
result = uri.path
val cut = result?.lastIndexOf('/')
if (cut != null && cut != -1) {
result = result?.substring(cut + 1)
}
}
return result
}
fun addPhoto(orientation: String, uri: String) {
photos[orientation] = uri
}
fun addSegmentedImage(uri: String) {
if (!segmentedImages.contains(uri)) {
segmentedImages.add(uri)
}
fun addSegmentedImage(orientation: String, uri: String) {
segmentedImages[orientation] = uri
}
fun setVideo(uri: String) {
_videoUri.value = uri
}
fun validateInputs(): Boolean {
fun validateSpeciesInputs(s: String? = null): Boolean {
if (s != null)
species.value = s
var isValid = true
if (species.value.isNullOrBlank()) {
@ -211,6 +128,14 @@ class AddProfileViewModel(
} else {
speciesError.value = null
}
return isValid
}
fun validateBreedInputs(b: String? = null): Boolean {
if (b != null)
breed.value = b
var isValid = true
if (breed.value.isNullOrBlank()) {
breedError.value = "Breed is required"
@ -219,21 +144,47 @@ class AddProfileViewModel(
breedError.value = null
}
return isValid
}
fun validateReproductiveStatusInputs(r: String? = null): Boolean {
if (r != null)
reproductiveStatus.value = r
var isValid = true
if (reproductiveStatus.value.isNullOrBlank()) {
reproductiveStatusError.value = "Status is required"
isValid = false
} else {
reproductiveStatusError.value = null
}
return isValid
}
fun validateAgeInputs(a: String? = null): Boolean {
if (a != null)
age.value = a
var isValid = true
val ageInt = age.value.toIntOrNull()
if (ageInt == null || ageInt <= 0 || ageInt > 20) {
ageError.value = "Invalid age"
isValid = false
} else {
ageError.value = null
ageError.value = null
}
return isValid
}
fun validateMilkYieldInputs(m: String? = null): Boolean {
if (m != null)
milkYield.value = m
var isValid = true
val milkInt = milkYield.value.toIntOrNull()
if (milkInt == null || milkInt <= 0 || milkInt > 75) {
milkYieldError.value = "Invalid milk yield"
@ -242,6 +193,15 @@ class AddProfileViewModel(
milkYieldError.value = null
}
return isValid
}
fun validateCalvingInputs(c: String? = null): Boolean {
if (c != null)
calvingNumber.value = c
var isValid = true
val calvingInt = calvingNumber.value.toIntOrNull()
if (calvingInt == null || calvingInt < 0 || calvingInt > 12) {
calvingNumberError.value = "Invalid calving number"
@ -253,6 +213,11 @@ class AddProfileViewModel(
return isValid
}
private fun validateInputs(): Boolean {
return validateSpeciesInputs() && validateBreedInputs() && validateAgeInputs()
&& validateMilkYieldInputs() && validateCalvingInputs() && validateReproductiveStatusInputs()
}
fun saveAnimalDetails(): Boolean {
if (!validateInputs()) return false
@ -267,9 +232,9 @@ class AddProfileViewModel(
calvingNumber = calvingNumber.value.toIntOrNull() ?: 0,
reproductiveStatus = reproductiveStatus.value ?: "",
description = description.value,
images = photos.values.toList(),
images = photos,
video = _videoUri.value ?: "",
segmentedImages = segmentedImages.toList(),
segmentedImages = segmentedImages,
name = "", sex = "", weight = 0
)
@ -285,8 +250,23 @@ class AddProfileViewModel(
_saveSuccess.value = false
}
init {
val animalId: String? = savedStateHandle?.get<String>("animalId")
loadAnimal(animalId)
fun initializeNewProfile() {
val newId = IdGenerator.generateAnimalId()
_currentAnimalId.value = newId
_animalDetails.value = null
// Reset UI State
species.value = null
breed.value = null
age.value = ""
milkYield.value = ""
calvingNumber.value = ""
reproductiveStatus.value = null
description.value = ""
clearErrors()
photos.clear()
segmentedImages.clear()
_videoUri.value = null
}
}
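The per-field validate*Inputs functions double as change handlers: the screen passes them to onValueChange/onSelected, so a single call updates the backing state and its error text. A standalone sketch of the pattern for one field (plain Kotlin plus Compose state; names are illustrative):

import androidx.compose.runtime.mutableStateOf

class AgeFieldSketch {
    val age = mutableStateOf("")
    val ageError = mutableStateOf<String?>(null)

    // Called from onValueChange with the new text, and from save-time validation with null.
    fun validateAgeInputs(a: String? = null): Boolean {
        if (a != null) age.value = a
        val ageInt = age.value.toIntOrNull()
        return if (ageInt == null || ageInt <= 0 || ageInt > 20) {
            ageError.value = "Invalid age"
            false
        } else {
            ageError.value = null
            true
        }
    }
}

One trade-off of chaining the validators with && in validateInputs() is that validation short-circuits, so on save only the first failing field gets its error set.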

View File

@ -134,9 +134,6 @@ fun CameraCaptureScreen(
}
if (uri != null) {
// This screen can be called from AddProfileScreen or ViewImageScreen (for retakes).
// In both cases, we pop the back stack and set the result on the previous entry.
// The NavGraph logic for each screen handles the received URI accordingly.
navController.previousBackStackEntry?.savedStateHandle?.set("newImageUri", uri.toString())
navController.previousBackStackEntry?.savedStateHandle?.set("newImageOrientation", orientation)
navController.popBackStack()
@ -326,32 +323,12 @@ fun SegmentationOverlay(
val offsetX = (canvasWidth - imageWidth * scale) / 2f
val offsetY = (canvasHeight - imageHeight * scale) / 2f
// The mask corresponds to the cropped and resized area of the silhouette,
// but here we are receiving the raw mask from MockPoseAnalyzer which seems to match the resized bitmap
// used for comparison (silhouette.croppedBitmap size).
// However, MockPoseAnalyzer.segment returns a mask of size `bitmap.width * bitmap.height`
// where `bitmap` is the resized crop.
// Wait, looking at MockPoseAnalyzer.analyze:
// 1. crops image to animalBounds
// 2. resizes crop to silhouette.croppedBitmap dimensions
// 3. segments resized crop -> mask
// So the mask is small (e.g. 100x100). We need to draw it scaled up to the animalBounds on screen.
val boxLeft = animalBounds.left * scale + offsetX
val boxTop = animalBounds.top * scale + offsetY
val boxWidth = animalBounds.width() * scale
val boxHeight = animalBounds.height() * scale
// We need to know the dimensions of the mask grid to draw it properly.
// Since we don't pass dimensions, we can infer if it's square or pass it.
// Assuming square for simplicity as per SilhouetteManager usually?
// Actually, we can just draw points.
val maskSize = kotlin.math.sqrt(mask.size.toDouble()).toInt()
// Ideally we should pass width/height of the mask.
// For now let's assume the mask matches the aspect ratio of the box or is just a grid.
if (maskSize > 0) {
val pixelW = boxWidth / maskSize
@ -395,7 +372,6 @@ fun InstructionOverlay(
color = Color.White,
textAlign = TextAlign.Center
)
// Visual GIF logic would go here using instruction.animationResId
}
}
}
@ -413,8 +389,6 @@ fun DetectionOverlay(
val canvasWidth = size.width
val canvasHeight = size.height
// This calculation assumes the camera preview's scale type is `FILL_CENTER`.
// It maintains the aspect ratio of the image and centers it.
val widthRatio = canvasWidth / imageWidth
val heightRatio = canvasHeight / imageHeight
val scale = max(widthRatio, heightRatio)
@ -422,7 +396,6 @@ fun DetectionOverlay(
val offsetX = (canvasWidth - imageWidth * scale) / 2f
val offsetY = (canvasHeight - imageHeight * scale) / 2f
// Helper to transform coordinates
val transform: (RectF) -> RectF = { box ->
RectF(
box.left * scale + offsetX,
@ -432,7 +405,6 @@ fun DetectionOverlay(
)
}
// Draw animal box (Yellow)
detection.animalBounds?.let {
val transformedBox = transform(it)
drawRect(
@ -443,7 +415,6 @@ fun DetectionOverlay(
)
}
// Draw reference object boxes (Cyan)
detection.referenceObjects.forEach { refObject ->
val transformedBox = transform(refObject.bounds)
drawRect(

View File

@ -1,19 +0,0 @@
// Obsolete file, replaced by CameraCaptureScreen.kt
// This file is kept to avoid breaking changes if referenced elsewhere, but the content is commented out to resolve errors.
// TODO: Migrate completely to CameraCaptureScreen or new CameraViewModel structure.
package com.example.livingai.pages.camera
import androidx.compose.runtime.Composable
import androidx.navigation.NavController
import org.koin.androidx.compose.koinViewModel
@Composable
fun CameraScreen(
// viewModel: CameraViewModel = koinViewModel(), // Commented out to fix build errors
navController: NavController,
orientation: String? = null,
animalId: String
) {
// Placeholder content
}

View File

@ -29,6 +29,7 @@ import androidx.compose.ui.geometry.Offset
import androidx.compose.ui.geometry.Size
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.drawscope.Stroke
import androidx.compose.ui.graphics.toArgb
import androidx.compose.ui.layout.ContentScale
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.unit.dp
@ -48,7 +49,6 @@ fun ViewImageScreen(
showAccept: Boolean,
showBack: Boolean,
showSegment: Boolean = false,
isSegmented: Boolean = false,
animalId: String,
orientation: String? = null,
onRetake: () -> Unit,
@ -62,10 +62,6 @@ fun ViewImageScreen(
var imageWidth by remember { mutableStateOf(0f) }
var imageHeight by remember { mutableStateOf(0f) }
// Check if this image is likely a segmented result based on the filename or uri content if available.
// However, we now have an explicit isSegmented flag which is more reliable for navigation flow
val isSegmentedResult = isSegmented || imageUri.contains("segmented")
val displayedUri = Uri.parse(imageUri)
var isSegmenting by remember { mutableStateOf(false) }
@ -78,7 +74,6 @@ fun ViewImageScreen(
val exif = ExifInterface(inputStream)
boundingBox = exif.getAttribute(ExifInterface.TAG_USER_COMMENT)
// Get image dimensions from Exif if possible, or we will rely on loading
val width = exif.getAttributeInt(ExifInterface.TAG_IMAGE_WIDTH, 0)
val height = exif.getAttributeInt(ExifInterface.TAG_IMAGE_LENGTH, 0)
if (width > 0 && height > 0) {
@ -116,13 +111,11 @@ fun ViewImageScreen(
alignment = Alignment.Center
)
// Draw Bounding Box if available AND NOT segmented
if (!isSegmentedResult && boundingBox != null && imageWidth > 0 && imageHeight > 0) {
if (boundingBox != null && imageWidth > 0 && imageHeight > 0) {
Canvas(modifier = Modifier.fillMaxSize()) {
val canvasWidth = size.width
val canvasHeight = size.height
// Parse bounding box string "left,top,right,bottom"
val parts = boundingBox!!.split(",")
if (parts.size == 4) {
val left = parts[0].toFloatOrNull() ?: 0f
@ -130,7 +123,6 @@ fun ViewImageScreen(
val right = parts[2].toFloatOrNull() ?: 0f
val bottom = parts[3].toFloatOrNull() ?: 0f
// Calculate scale and offset (CenterInside/Fit logic)
val widthRatio = canvasWidth / imageWidth
val heightRatio = canvasHeight / imageHeight
val scale = min(widthRatio, heightRatio)
@ -141,7 +133,6 @@ fun ViewImageScreen(
val offsetX = (canvasWidth - displayedWidth) / 2
val offsetY = (canvasHeight - displayedHeight) / 2
// Transform coordinates
val rectLeft = left * scale + offsetX
val rectTop = top * scale + offsetY
val rectRight = right * scale + offsetX
@ -177,51 +168,83 @@ fun ViewImageScreen(
scope.launch {
isSegmenting = true
// Parse bounding box to crop
var cropBitmap: Bitmap? = null
try {
val originalBitmap = withContext(Dispatchers.IO) {
context.contentResolver.openInputStream(displayedUri)?.use {
BitmapFactory.decodeStream(it)
}
}
if (boundingBox != null) {
try {
withContext(Dispatchers.IO) {
context.contentResolver.openInputStream(displayedUri)?.use { stream ->
val original = BitmapFactory.decodeStream(stream)
val parts = boundingBox!!.split(",")
if (parts.size == 4 && original != null) {
val left = parts[0].toFloatOrNull()?.toInt() ?: 0
val top = parts[1].toFloatOrNull()?.toInt() ?: 0
val right = parts[2].toFloatOrNull()?.toInt() ?: 0
val bottom = parts[3].toFloatOrNull()?.toInt() ?: 0
if (originalBitmap == null) {
isSegmenting = false
return@launch
}
val w = right - left
val h = bottom - top
val finalBitmap = if (boundingBox != null) {
if (w > 0 && h > 0 && left >= 0 && top >= 0 &&
left + w <= original.width && top + h <= original.height) {
cropBitmap = Bitmap.createBitmap(original, left, top, w, h)
} else {
cropBitmap = original
val parts = boundingBox!!.split(",")
if (parts.size == 4) {
val left = parts[0].toFloatOrNull()?.toInt() ?: 0
val top = parts[1].toFloatOrNull()?.toInt() ?: 0
val right = parts[2].toFloatOrNull()?.toInt() ?: 0
val bottom = parts[3].toFloatOrNull()?.toInt() ?: 0
val w = right - left
val h = bottom - top
if (
w > 0 && h > 0 &&
left >= 0 && top >= 0 &&
left + w <= originalBitmap.width &&
top + h <= originalBitmap.height
) {
val cropped = Bitmap.createBitmap(originalBitmap, left, top, w, h)
val segmentedCrop =
segmenterHelper.segmentToBitmap(cropped)
if (segmentedCrop != null) {
Bitmap.createBitmap(
originalBitmap.width,
originalBitmap.height,
Bitmap.Config.ARGB_8888
).apply {
android.graphics.Canvas(this).apply {
drawColor(Color.Black.toArgb())
drawBitmap(segmentedCrop, left.toFloat(), top.toFloat(), null)
}
}
} else {
cropBitmap = original
null
}
} else {
segmenterHelper.segmentToBitmap(originalBitmap)
}
} else {
segmenterHelper.segmentToBitmap(originalBitmap)
}
} catch (e: Exception) {
e.printStackTrace()
} else {
segmenterHelper.segmentToBitmap(originalBitmap)
}
}
val bitmapToSegment = cropBitmap
val resultUri = if (bitmapToSegment != null) {
segmenterHelper.segmentAndSave(bitmapToSegment, animalId, orientation ?: "unknown", "Segmented images")
} else {
segmenterHelper.segmentAndSave(displayedUri, animalId, orientation ?: "unknown", "Segmented images")
}
val resultUri = finalBitmap?.let {
segmenterHelper.segmentAndSave(
it,
animalId,
orientation ?: "unknown",
"Segmented images"
)
}
if (resultUri != null) {
onSegmented(resultUri.toString())
if (resultUri != null) {
onSegmented(resultUri.toString())
}
} catch (e: Exception) {
e.printStackTrace()
} finally {
isSegmenting = false
}
isSegmenting = false
}
}) {
Text("Segment")

View File

@ -35,20 +35,6 @@ import com.example.livingai.utils.Constants
@OptIn(ExperimentalMaterial3Api::class)
@Composable
fun HomeScreen(navController: NavController) {
val context = LocalContext.current
val silhouetteMap = remember {
Constants.silhouetteList.associateWith { item ->
val resId = context.resources.getIdentifier("label_${item}", "string", context.packageName)
if (resId != 0) context.getString(resId) else item
}
}
// Reverse map for lookup (Display Name -> ID)
val displayToIdMap = remember { silhouetteMap.entries.associate { (k, v) -> v to k } }
val orientationOptions = remember { silhouetteMap.values.toList() }
var selectedOrientationDisplay by remember { mutableStateOf(orientationOptions.firstOrNull() ?: "") }
CommonScaffold(
navController = navController,
title = stringResource(id = R.string.app_name),
@ -84,26 +70,6 @@ fun HomeScreen(navController: NavController) {
text = stringResource(id = R.string.top_bar_add_profile),
onClick = { navController.navigate(Route.AddProfileScreen()) }
)
// Spacer(modifier = Modifier.height(Dimentions.SMALL_PADDING))
//
// // Dropdown for selecting orientation
// LabeledDropdown(
// labelRes = R.string.default_orientation_label, // Or create a generic "Orientation" label
// options = orientationOptions,
// selected = selectedOrientationDisplay,
// onSelected = { selectedOrientationDisplay = it },
// modifier = Modifier.fillMaxWidth()
// )
//
// HomeButton(
// text = "Camera Capture",
// onClick = {
// val orientationId = displayToIdMap[selectedOrientationDisplay] ?: "side"
// navController.navigate(Route.CameraScreen(orientation = orientationId, animalId = "home_test"))
// }
// )
}
}
}

View File

@ -21,11 +21,7 @@ class HomeViewModel(
init {
appDataUseCases.readAppEntry().onEach { shouldStartFromHomeScreen ->
// if(shouldStartFromHomeScreen){
_startDestination.value = Route.HomeNavigation
// }else{
// _startDestination.value = Route.AppStartNavigation
// }
_startDestination.value = Route.HomeNavigation
delay(350) // Without this delay, the onBoarding screen would show for a moment.
_splashCondition.value = false
}.launchIn(viewModelScope)

View File

@ -3,6 +3,9 @@ package com.example.livingai.pages.navigation
import androidx.compose.runtime.Composable
import androidx.compose.runtime.LaunchedEffect
import androidx.compose.runtime.getValue
import androidx.compose.runtime.mutableStateOf
import androidx.compose.runtime.remember
import androidx.compose.runtime.setValue
import androidx.navigation.compose.NavHost
import androidx.navigation.compose.composable
import androidx.navigation.compose.navigation
@ -63,24 +66,22 @@ fun NavGraph(
val newImageOrientation = backStackEntry.savedStateHandle.get<String>("newImageOrientation")
val newVideoUri = backStackEntry.savedStateHandle.get<String>("newVideoUri")
// We listen for segmented image here too
val newSegmentedUri = backStackEntry.savedStateHandle.get<String>("newSegmentedUri")
LaunchedEffect(newImageUri, newImageOrientation) {
if (newImageUri != null && newImageOrientation != null) {
viewModel.addPhoto(newImageOrientation, newImageUri)
backStackEntry.savedStateHandle.remove<String>("newImageUri")
LaunchedEffect(newImageUri, newImageOrientation, newSegmentedUri) {
if (newImageOrientation != null) {
if (newSegmentedUri != null) {
viewModel.addSegmentedImage(newImageOrientation, newSegmentedUri)
backStackEntry.savedStateHandle.remove<String>("newSegmentedUri")
}
if (newImageUri != null) {
viewModel.addPhoto(newImageOrientation, newImageUri)
backStackEntry.savedStateHandle.remove<String>("newImageUri")
}
backStackEntry.savedStateHandle.remove<String>("newImageOrientation")
}
}
LaunchedEffect(newSegmentedUri) {
if (newSegmentedUri != null) {
viewModel.addSegmentedImage(newSegmentedUri)
backStackEntry.savedStateHandle.remove<String>("newSegmentedUri")
}
}
LaunchedEffect(newVideoUri) {
if (newVideoUri != null) {
viewModel.setVideo(newVideoUri)
@ -93,6 +94,8 @@ fun NavGraph(
AddProfileScreen(
navController = navController,
viewModel = viewModel,
animalId = route.animalId,
loadEntry = route.loadEntry,
onSave = {
val isSaved = viewModel.saveAnimalDetails()
if (isSaved)
@ -151,8 +154,17 @@ fun NavGraph(
composable<Route.ViewImageScreen> { backStackEntry ->
val args: Route.ViewImageScreen = backStackEntry.toRoute()
var imageUri by remember { mutableStateOf(args.imageUri) }
val newImageUri = backStackEntry.savedStateHandle.get<String>("newImageUri")
LaunchedEffect(newImageUri) {
if (newImageUri != null) {
imageUri = newImageUri
backStackEntry.savedStateHandle.remove<String>("newImageUri")
}
}
ViewImageScreen(
imageUri = args.imageUri,
imageUri = imageUri,
shouldAllowRetake = args.shouldAllowRetake,
showAccept = args.showAccept,
showBack = args.showBack,
@ -160,16 +172,14 @@ fun NavGraph(
animalId = args.animalId,
orientation = args.orientation,
onRetake = {
navController.popBackStack()
args.orientation?.let { navController.navigate(Route.CameraScreen(orientation = it, animalId = args.animalId)) }
},
onAccept = { uri ->
// If it's a segmented result, add to segmented list
if (args.imageUri.contains("segmented")) {
if (imageUri.contains("segmented")) {
navController.getBackStackEntry<Route.AddProfileScreen>().savedStateHandle["newSegmentedUri"] = uri
navController.getBackStackEntry<Route.AddProfileScreen>().savedStateHandle["newImageOrientation"] = args.orientation
navController.popBackStack<Route.AddProfileScreen>(inclusive = false)
} else {
// Normal image
navController.getBackStackEntry<Route.AddProfileScreen>().savedStateHandle["newImageUri"] = uri
navController.getBackStackEntry<Route.AddProfileScreen>().savedStateHandle["newImageOrientation"] = args.orientation
navController.popBackStack<Route.AddProfileScreen>(inclusive = false)
@ -183,7 +193,8 @@ fun NavGraph(
showAccept = true,
showBack = true,
showSegment = false,
animalId = args.animalId
animalId = args.animalId,
isSegmented = true
))
},
onBack = { navController.popBackStack() }
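Results travel back from the capture and view screens through the previous back-stack entry's SavedStateHandle: the child writes newImageUri/newImageOrientation (or newSegmentedUri), pops, and the parent's LaunchedEffect above consumes and removes the keys. A condensed sketch of the sending side (Compose Navigation; the keys match the ones used in this diff):

import android.net.Uri
import androidx.navigation.NavController

// Child screen side: publish the captured image for the previous destination, then pop.
// The AddProfileScreen entry reads the same keys in a LaunchedEffect and removes them
// after consuming, so a result is handled exactly once.
fun deliverCaptureResult(navController: NavController, uri: Uri, orientation: String) {
    navController.previousBackStackEntry?.savedStateHandle?.apply {
        set("newImageUri", uri.toString())
        set("newImageOrientation", orientation)
    }
    navController.popBackStack()
}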

View File

@ -24,7 +24,7 @@ class RatingViewModel(
private val _ratingState = MutableStateFlow<AnimalRating?>(null)
val ratingState = _ratingState.asStateFlow()
private val _animalImages = MutableStateFlow<List<String>>(emptyList())
private val _animalImages = MutableStateFlow<Map<String, String>>(emptyMap())
val animalImages = _animalImages.asStateFlow()
private val animalId: String = savedStateHandle.get<String>("animalId")!!
@ -36,7 +36,7 @@ class RatingViewModel(
private fun loadAnimalDetails() {
getAnimalDetails(animalId).onEach {
_animalImages.value = it?.images ?: emptyList()
_animalImages.value = it?.images ?: emptyMap()
}.launchIn(viewModelScope)
}
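With images keyed by orientation, consumers like the rating screen can look up a specific view directly instead of scanning filenames. A tiny sketch (the "side" key is one of the orientations used by Constants.silhouetteList in this diff; the fallback is illustrative):

// Prefer the side profile if present, otherwise fall back to any available image.
fun pickRatingImage(images: Map<String, String>): String? =
    images["side"] ?: images.values.firstOrNull()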