Compare commits


No commits in common. "animal-rating-android-studio" and "livingai-cleanarch-kmp" have entirely different histories.

203 changed files with 10126 additions and 4441 deletions

View File

@@ -4,17 +4,6 @@
<selectionStates>
<SelectionState runConfigName="app">
<option name="selectionMode" value="DROPDOWN" />
<DropdownSelection timestamp="2025-11-24T18:35:29.331278100Z">
<Target type="DEFAULT_BOOT">
<handle>
<DeviceId pluginId="PhysicalDevice" identifier="serial=10BF45100J001X5" />
</handle>
</Target>
</DropdownSelection>
<DialogSelection />
</SelectionState>
<SelectionState runConfigName="release">
<option name="selectionMode" value="DROPDOWN" />
</SelectionState>
</selectionStates>
</component>

View File

@@ -0,0 +1,61 @@
<component name="InspectionProjectProfileManager">
<profile version="1.0">
<option name="myName" value="Project Default" />
<inspection_tool class="ComposePreviewDimensionRespectsLimit" enabled="true" level="WARNING" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="ComposePreviewMustBeTopLevelFunction" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="ComposePreviewNeedsComposableAnnotation" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="ComposePreviewNotSupportedInUnitTestFiles" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="GlancePreviewDimensionRespectsLimit" enabled="true" level="WARNING" enabled_by_default="true">
<option name="composableFile" value="true" />
</inspection_tool>
<inspection_tool class="GlancePreviewMustBeTopLevelFunction" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
</inspection_tool>
<inspection_tool class="GlancePreviewNeedsComposableAnnotation" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
</inspection_tool>
<inspection_tool class="GlancePreviewNotSupportedInUnitTestFiles" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewAnnotationInFunctionWithParameters" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewApiLevelMustBeValid" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewDeviceShouldUseNewSpec" enabled="true" level="WEAK WARNING" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewFontScaleMustBeGreaterThanZero" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewMultipleParameterProviders" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewParameterProviderOnFirstParameter" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
<inspection_tool class="PreviewPickerAnnotation" enabled="true" level="ERROR" enabled_by_default="true">
<option name="composableFile" value="true" />
<option name="previewFile" value="true" />
</inspection_tool>
</profile>
</component>

View File

@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="MarkdownSettings">
<option name="previewPanelProviderInfo">
<ProviderInfo name="Compose (experimental)" className="com.intellij.markdown.compose.preview.ComposePanelProvider" />
</option>
</component>
</project>

Binary file not shown.

View File

@@ -0,0 +1,4 @@
kotlin version: 2.0.21
error message: The daemon has terminated unexpectedly on startup attempt #1 with error code: 0. The daemon process output:
1. Kotlin compile daemon is ready

View File

@@ -0,0 +1,4 @@
kotlin version: 2.0.21
error message: The daemon has terminated unexpectedly on startup attempt #1 with error code: 0. The daemon process output:
1. Kotlin compile daemon is ready

View File

@@ -0,0 +1,4 @@
kotlin version: 2.0.21
error message: The daemon has terminated unexpectedly on startup attempt #1 with error code: 0. The daemon process output:
1. Kotlin compile daemon is ready

View File

@@ -2,16 +2,17 @@ plugins {
alias(libs.plugins.android.application)
alias(libs.plugins.kotlin.android)
alias(libs.plugins.kotlin.compose)
alias(libs.plugins.kotlinx.serialization)
}
android {
namespace = "com.example.animalrating"
namespace = "com.example.livingai"
compileSdk {
version = release(36)
}
defaultConfig {
applicationId = "com.example.animalrating"
applicationId = "com.example.livingai"
minSdk = 24
targetSdk = 36
versionCode = 1
@@ -27,7 +28,6 @@ android {
getDefaultProguardFile("proguard-android-optimize.txt"),
"proguard-rules.pro"
)
signingConfig = signingConfigs.getByName("debug")
}
}
compileOptions {
@@ -39,25 +39,46 @@ android {
}
buildFeatures {
compose = true
mlModelBinding = true
}
aaptOptions {
noCompress += "tflite"
}
}
dependencies {
val cameraxVersion = "1.5.1"
implementation(libs.androidx.paging.common)
implementation(libs.androidx.ui)
implementation(libs.androidx.window)
val cameraxVersion = "1.5.0-alpha03"
implementation("androidx.appcompat:appcompat:1.7.0")
implementation("androidx.cardview:cardview:1.0.0")
implementation("com.google.android.material:material:1.12.0")
implementation("androidx.camera:camera-core:$cameraxVersion")
implementation("androidx.camera:camera-camera2:$cameraxVersion")
implementation("androidx.camera:camera-lifecycle:$cameraxVersion")
implementation("androidx.camera:camera-view:$cameraxVersion")
implementation("androidx.camera:camera-mlkit-vision:$cameraxVersion")
// ML Kit Object Detection
//Splash Api
implementation("androidx.core:core-splashscreen:1.0.1")
implementation("androidx.camera:camera-core:${cameraxVersion}")
implementation("androidx.camera:camera-camera2:${cameraxVersion}")
implementation("androidx.camera:camera-lifecycle:${cameraxVersion}")
implementation("androidx.camera:camera-view:${cameraxVersion}")
implementation("androidx.camera:camera-video:${cameraxVersion}")
implementation("androidx.camera:camera-mlkit-vision:${cameraxVersion}")
implementation("com.google.mlkit:object-detection:17.0.2")
// ML Kit Subject Segmentation (Google Play Services version)
implementation("com.google.android.gms:play-services-mlkit-subject-segmentation:16.0.0-beta1")
// Tensorflow Lite
implementation(libs.tensorflow.lite.support)
implementation(libs.tensorflow.lite)
implementation(libs.tensorflow.lite.gpu)
implementation(libs.tensorflow.lite.task.vision)
//Koin
implementation(libs.koin.android)
implementation(libs.koin.androidx.compose)
//Navigation (Nav3 / Type-Safe Navigation)
implementation(libs.androidx.navigation.compose)
implementation(libs.kotlinx.serialization.json)
implementation(libs.androidx.core.ktx)
implementation(libs.androidx.lifecycle.runtime.ktx)
implementation(libs.androidx.activity.compose)
@@ -66,6 +87,23 @@ dependencies {
implementation(libs.androidx.compose.ui.graphics)
implementation(libs.androidx.compose.ui.tooling.preview)
implementation(libs.androidx.compose.material3)
implementation(libs.androidx.datastore.preferences)
// Icons
implementation("androidx.compose.material:material-icons-extended:1.7.5")
// OpenCSV
implementation("com.opencsv:opencsv:5.7.1")
// Coil
implementation("io.coil-kt:coil-compose:2.5.0")
implementation("io.coil-kt:coil-video:2.5.0")
implementation("io.coil-kt:coil-svg:2.5.0")
// Paging
implementation("androidx.paging:paging-runtime:3.2.1")
implementation("androidx.paging:paging-compose:3.2.1")
testImplementation(libs.junit)
androidTestImplementation(libs.androidx.junit)
androidTestImplementation(libs.androidx.espresso.core)
@@ -73,4 +111,4 @@ dependencies {
androidTestImplementation(libs.androidx.compose.ui.test.junit4)
debugImplementation(libs.androidx.compose.ui.tooling)
debugImplementation(libs.androidx.compose.ui.test.manifest)
}
}

View File

@@ -1,4 +1,4 @@
package com.example.animalrating
package com.example.livingai
import androidx.test.platform.app.InstrumentationRegistry
import androidx.test.ext.junit.runners.AndroidJUnit4
@@ -19,6 +19,6 @@ class ExampleInstrumentedTest {
fun useAppContext() {
// Context of the app under test.
val appContext = InstrumentationRegistry.getInstrumentation().targetContext
assertEquals("com.example.animalrating", appContext.packageName)
assertEquals("com.example.livingai", appContext.packageName)
}
}

BIN
app/src/main.zip Normal file

Binary file not shown.

View File

@@ -2,13 +2,21 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools">
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
<uses-permission android:name="android.permission.MANAGE_EXTERNAL_STORAGE" tools:ignore="ScopedStorage" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" android:maxSdkVersion="28" />
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" android:maxSdkVersion="32" />
<!-- Granular media permissions required on Android 13+ -->
<uses-permission android:name="android.permission.READ_MEDIA_IMAGES" />
<uses-permission android:name="android.permission.READ_MEDIA_VIDEO" />
<uses-permission android:name="android.permission.READ_MEDIA_AUDIO" />
<application
android:name=".LivingAIApplication"
android:allowBackup="true"
android:dataExtractionRules="@xml/data_extraction_rules"
android:fullBackupContent="@xml/backup_rules"
@@ -16,46 +24,23 @@
android:label="@string/app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.AnimalRating"
android:requestLegacyExternalStorage="true">
android:theme="@style/LivingAI.Starting.Theme">
<meta-data
android:name="com.google.mlkit.vision.DEPENDENCIES"
android:value="obj" />
<activity
android:name=".HomeActivity"
android:name=".MainActivity"
android:exported="true"
android:label="@string/app_name"
android:theme="@style/Theme.AnimalRating">
android:theme="@style/LivingAI.Starting.Theme">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity
android:name=".CowSelectionActivity"
android:exported="false"
android:theme="@style/Theme.AnimalRating" />
<activity
android:name=".CameraProcessor"
android:exported="false"
android:theme="@style/Theme.AnimalRating" />
<activity
android:name=".GalleryActivity"
android:exported="false"
android:theme="@style/Theme.AnimalRating" />
<activity
android:name=".FullScreenImageActivity"
android:exported="false"
android:theme="@style/Theme.AnimalRating" />
<activity
android:name=".RatingActivity"
android:exported="false"
android:theme="@style/Theme.AnimalRating" />
</application>
</manifest>

Binary file not shown.

View File

@@ -0,0 +1,81 @@
Unknown
person
bicycle
car
motorcycle
airplane
bus
train
truck
boat
traffic light
fire hydrant
stop sign
parking meter
bench
bird
cat
dog
horse
sheep
cow
elephant
bear
zebra
giraffe
backpack
umbrella
handbag
tie
suitcase
frisbee
skis
snowboard
sports ball
kite
baseball bat
baseball glove
skateboard
surfboard
tennis racket
bottle
wine glass
cup
fork
knife
spoon
bowl
banana
apple
sandwich
orange
broccoli
carrot
hot dog
pizza
donut
cake
chair
couch
potted plant
bed
dining table
toilet
tv
laptop
mouse
remote
keyboard
cell phone
microwave
oven
toaster
sink
refrigerator
book
clock
vase
scissors
teddy bear
hair drier
toothbrush

View File

@@ -1,373 +0,0 @@
package com.example.animalrating
import android.Manifest
import android.content.ContentValues
import android.content.Intent
import android.content.pm.ActivityInfo
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.graphics.Color
import android.graphics.Matrix
import android.os.Build
import android.os.Bundle
import android.provider.MediaStore
import android.util.Log
import android.util.Size
import android.view.View
import android.widget.ImageView
import android.widget.Toast
import androidx.activity.result.contract.ActivityResultContracts
import androidx.appcompat.app.AppCompatActivity
import androidx.camera.core.ExperimentalGetImage
import androidx.camera.core.ImageAnalysis
import androidx.camera.core.ImageCapture
import androidx.camera.core.ImageCaptureException
import androidx.camera.core.ImageProxy
import androidx.camera.lifecycle.ProcessCameraProvider
import androidx.camera.view.PreviewView
import androidx.core.content.ContextCompat
import com.example.animalrating.ml.CowAnalyzer
import com.example.animalrating.ui.SilhouetteOverlay
import com.google.android.material.button.MaterialButton
import java.io.File
import java.io.FileInputStream
import java.io.FileOutputStream
import java.util.concurrent.Executors
class CameraProcessor : AppCompatActivity(), CowAnalyzer.CowListener {
private lateinit var previewView: PreviewView
private lateinit var overlay: SilhouetteOverlay
private lateinit var segmentationOverlay: ImageView
private lateinit var savedMaskOverlay: ImageView
private var imageCapture: ImageCapture? = null
private lateinit var frameProcessor: FrameProcessor
private val cameraExecutor = Executors.newSingleThreadExecutor()
private var cowName: String? = null
private var orientation: String? = null
private var currentMask: Bitmap? = null
private var savedMaskBitmap: Bitmap? = null
private var isPhotoTaken = false
private var matchThreshold = 75
private var algorithm = HomeActivity.ALGORITHM_HAMMING
private var isAutoCapture = true
private var isMaskDisplayEnabled = false
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
StringProvider.initialize(this)
cowName = intent.getStringExtra("COW_NAME")
orientation = intent.getStringExtra("ORIENTATION")
// Load settings
val prefs = getSharedPreferences("AnimalRatingPrefs", MODE_PRIVATE)
matchThreshold = prefs.getInt("THRESHOLD", 75)
algorithm = prefs.getString("ALGORITHM", HomeActivity.ALGORITHM_HAMMING) ?: HomeActivity.ALGORITHM_HAMMING
isAutoCapture = prefs.getBoolean(HomeActivity.PREF_AUTO_CAPTURE, true)
isMaskDisplayEnabled = prefs.getBoolean(HomeActivity.PREF_MASK_DISPLAY, false)
// Set orientation based on selected view
if (orientation == "front" || orientation == "back") {
requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_PORTRAIT
} else {
requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE
}
previewView = findViewById(R.id.cameraPreview)
overlay = findViewById(R.id.silhouetteOverlay)
segmentationOverlay = findViewById(R.id.segmentationOverlay)
savedMaskOverlay = findViewById(R.id.savedMaskOverlay)
findViewById<MaterialButton>(R.id.btnExit).setOnClickListener {
finish()
}
val btnShutter = findViewById<MaterialButton>(R.id.btnShutter)
val btnToggle = findViewById<MaterialButton>(R.id.btnToggleCaptureMode)
btnShutter.setOnClickListener {
takePhoto()
}
updateCaptureModeUI()
btnToggle.setOnClickListener {
isAutoCapture = !isAutoCapture
// Persist the toggle so the capture mode carries over to future sessions
prefs.edit().putBoolean(HomeActivity.PREF_AUTO_CAPTURE, isAutoCapture).apply()
updateCaptureModeUI()
}
frameProcessor = FrameProcessor()
val silhouetteId = intent.getIntExtra("SILHOUETTE_ID", 0)
overlay.setSilhouette(silhouetteId)
// Need to wait for layout to get width/height for scaling mask correctly
savedMaskOverlay.post {
loadSavedMask()
}
requestPermissionLauncher.launch(Manifest.permission.CAMERA)
}
private fun updateCaptureModeUI() {
val btnShutter = findViewById<MaterialButton>(R.id.btnShutter)
val btnToggle = findViewById<MaterialButton>(R.id.btnToggleCaptureMode)
if (isAutoCapture) {
btnShutter.visibility = View.GONE
// Auto Mode: Eye Icon, Dark Background
btnToggle.setIconResource(android.R.drawable.ic_menu_view)
btnToggle.setIconTintResource(android.R.color.white)
btnToggle.setBackgroundColor(Color.parseColor("#6D4C41"))
btnToggle.alpha = 1.0f
} else {
btnShutter.visibility = View.VISIBLE
// Manual mode: eye icon shown greyed out on a transparent background
btnToggle.setIconResource(android.R.drawable.ic_menu_view)
btnToggle.setIconTintResource(android.R.color.darker_gray)
btnToggle.setBackgroundColor(Color.TRANSPARENT)
btnToggle.alpha = 0.7f
}
}
private fun takePhoto() {
if (isPhotoTaken) return
isPhotoTaken = true
val imageCapture = imageCapture ?: return
val name = cowName ?: "unknown"
val side = orientation ?: "unknown"
val silhouetteId = intent.getIntExtra("SILHOUETTE_ID", 0)
// Find current count for this cow and orientation
val cowFolder = StorageUtils.getCowImageFolder(name)
val existingFiles = cowFolder.listFiles { _, fname ->
fname.startsWith("${name}_${side}_") && fname.endsWith(".jpg")
}
val count = (existingFiles?.size ?: 0) + 1
if (!cowFolder.exists()) {
cowFolder.mkdirs()
}
val filename = "${name}_${side}_${count}.jpg"
val file = File(cowFolder, filename)
val outputOptions = ImageCapture.OutputFileOptions.Builder(file).build()
imageCapture.takePicture(
outputOptions,
ContextCompat.getMainExecutor(this),
object : ImageCapture.OnImageSavedCallback {
override fun onError(exc: ImageCaptureException) {
Log.e("CameraProcessor", "Photo capture failed: ${exc.message}", exc)
Toast.makeText(baseContext, StringProvider.getString("toast_capture_failed"), Toast.LENGTH_SHORT).show()
isPhotoTaken = false
}
override fun onImageSaved(output: ImageCapture.OutputFileResults) {
// Correct rotation based on current orientation
try {
val bitmap = BitmapFactory.decodeFile(file.absolutePath)
val matrix = Matrix()
val rotation = if (orientation == "front" || orientation == "back") 90f else 0f
if (rotation != 0f) {
matrix.postRotate(rotation)
val rotatedBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
FileOutputStream(file).use { out ->
rotatedBitmap.compress(Bitmap.CompressFormat.JPEG, 100, out)
}
}
val retakePath = intent.getStringExtra("RETAKE_IMAGE_PATH")
if (!retakePath.isNullOrEmpty()) {
val oldFile = File(retakePath)
if (oldFile.exists()) {
oldFile.delete()
}
}
val msg = "${StringProvider.getString("toast_saved_as")} $filename"
Toast.makeText(baseContext, msg, Toast.LENGTH_SHORT).show()
// Navigate to FullScreenImageActivity
val intent = Intent(this@CameraProcessor, FullScreenImageActivity::class.java)
intent.putExtra("IMAGE_PATH", file.absolutePath)
intent.putExtra("ALLOW_RETAKE", true)
intent.putExtra("COW_NAME", cowName)
intent.putExtra("ORIENTATION", orientation)
intent.putExtra("SILHOUETTE_ID", silhouetteId)
startActivity(intent)
finish()
} catch (e: Exception) {
Log.e("CameraProcessor", "Error saving image", e)
Toast.makeText(baseContext, StringProvider.getString("toast_error_saving_image"), Toast.LENGTH_SHORT).show()
isPhotoTaken = false
}
}
}
)
}
private fun saveToGallery(file: File) {
// Removed
}
private fun loadSavedMask() {
val side = orientation ?: "unknown"
val filename = "${side}_mask.png"
val file = File(filesDir, filename)
if (file.exists()) {
try {
val savedBitmap = BitmapFactory.decodeFile(file.absolutePath)
if (savedBitmap != null) {
// Apply green color filter for visualization
if (isMaskDisplayEnabled) {
val greenMask = applyGreenColor(savedBitmap)
// Calculate scale to match FIT_CENTER logic of SilhouetteOverlay
val viewW = savedMaskOverlay.width.toFloat()
val viewH = savedMaskOverlay.height.toFloat()
if (viewW > 0 && viewH > 0) {
val bmpW = greenMask.width.toFloat()
val bmpH = greenMask.height.toFloat()
val scale = kotlin.math.min(viewW / bmpW, viewH / bmpH)
val scaledW = (bmpW * scale).toInt()
val scaledH = (bmpH * scale).toInt()
if (scaledW > 0 && scaledH > 0) {
val scaledBitmap = Bitmap.createScaledBitmap(greenMask, scaledW, scaledH, true)
savedMaskOverlay.setImageBitmap(scaledBitmap)
savedMaskOverlay.scaleType = ImageView.ScaleType.FIT_CENTER // Ensure it centers
}
} else {
// Fallback if view size not ready yet, though post() should handle it
savedMaskOverlay.setImageBitmap(greenMask)
}
savedMaskOverlay.alpha = 0.5f
} else {
savedMaskOverlay.setImageDrawable(null)
}
// Prepare mask for analysis (640x480 target).
// Ideally this would match only the visible part of the preview; since the
// overlay uses FIT_CENTER, the analysis could respect that aspect ratio as well.
// For now the full-frame scaling is kept for analysis and only the visual overlay is adjusted.
val isPortrait = (side == "front" || side == "back")
val width = if (isPortrait) 480 else 640
val height = if (isPortrait) 640 else 480
savedMaskBitmap = Bitmap.createScaledBitmap(savedBitmap, width, height, true)
}
} catch (e: Exception) {
Log.e("CameraProcessor", "Error loading saved mask", e)
}
}
}
private fun applyGreenColor(original: Bitmap): Bitmap {
val width = original.width
val height = original.height
val pixels = IntArray(width * height)
original.getPixels(pixels, 0, width, 0, 0, width, height)
for (i in pixels.indices) {
val alpha = (pixels[i] shr 24) and 0xff
if (alpha > 10) {
// Set to Green with original alpha
pixels[i] = Color.argb(alpha, 0, 255, 0)
}
}
return Bitmap.createBitmap(pixels, width, height, Bitmap.Config.ARGB_8888)
}
private val requestPermissionLauncher =
registerForActivityResult(ActivityResultContracts.RequestPermission()) { granted ->
if (granted) startCamera()
}
private fun startCamera() {
val providerFuture = ProcessCameraProvider.getInstance(this)
providerFuture.addListener({
val cameraProvider = providerFuture.get()
val preview = androidx.camera.core.Preview.Builder().build()
preview.surfaceProvider = previewView.surfaceProvider
imageCapture = ImageCapture.Builder()
.setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
.build()
val analyzer = ImageAnalysis.Builder()
.setTargetResolution(Size(640, 480))
.setBackpressureStrategy(
ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST
)
.build()
.also {
it.setAnalyzer(cameraExecutor, CowAnalyzer(this))
}
cameraProvider.unbindAll()
cameraProvider.bindToLifecycle(
this,
androidx.camera.core.CameraSelector.DEFAULT_BACK_CAMERA,
preview,
imageCapture,
analyzer
)
}, ContextCompat.getMainExecutor(this))
}
@ExperimentalGetImage
override fun onFrame(imageProxy: ImageProxy) {
if (isPhotoTaken) {
imageProxy.close()
return
}
val isPortrait = (orientation == "front" || orientation == "back")
frameProcessor.processFrame(imageProxy, savedMaskBitmap, isPortrait, matchThreshold, algorithm)
.addOnSuccessListener { result ->
runOnUiThread {
if (result.mask != null) {
currentMask = result.mask
if (isMaskDisplayEnabled) {
segmentationOverlay.setImageBitmap(result.mask)
} else {
segmentationOverlay.setImageDrawable(null)
}
}
}
if (isAutoCapture && result.isMatch) {
takePhoto()
}
}
.addOnFailureListener { e ->
Log.e("CameraProcessor", "Frame processing error", e)
}
}
}

View File

@@ -1,519 +0,0 @@
package com.example.animalrating
import android.content.Intent
import android.content.pm.PackageManager
import android.graphics.Bitmap
import android.graphics.BitmapFactory
import android.graphics.Color
import android.os.Bundle
import android.view.View
import android.widget.ArrayAdapter
import android.widget.AutoCompleteTextView
import android.widget.Button
import android.widget.ImageView
import android.widget.LinearLayout
import android.widget.RadioButton
import android.widget.RadioGroup
import android.widget.TextView
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import com.google.android.material.textfield.TextInputEditText
import com.google.android.material.textfield.TextInputLayout
import java.io.File
import java.io.FileOutputStream
import java.io.FileWriter
import java.text.SimpleDateFormat
import java.util.Date
import java.util.Locale
class CowSelectionActivity : AppCompatActivity() {
private var currentCowName: String? = null
private lateinit var imagesContainer: LinearLayout
private val storagePermissionCode = 101
private val orientationViews = mutableMapOf<String, View>()
private val initialImagePaths = mutableSetOf<String>()
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_cow_selection)
StringProvider.initialize(this)
setupUIStrings()
val toolbar = findViewById<androidx.appcompat.widget.Toolbar>(R.id.toolbar)
setSupportActionBar(toolbar)
supportActionBar?.setDisplayShowTitleEnabled(false)
toolbar.setNavigationOnClickListener {
finish()
}
initializeDefaultMasks()
setupDropdowns()
imagesContainer = findViewById(R.id.currentCowImagesContainer)
currentCowName = savedInstanceState?.getString("COW_NAME") ?: intent.getStringExtra("COW_NAME")
if (currentCowName == null) {
generateNewCowName()
}
loadInitialImages()
if (intent.hasExtra("COW_NAME")) {
loadCowDetails(currentCowName!!)
}
orientationViews["left"] = findViewById(R.id.btnLeft)
orientationViews["right"] = findViewById(R.id.btnRight)
orientationViews["angle"] = findViewById(R.id.btnTop)
orientationViews["front"] = findViewById(R.id.btnFront)
orientationViews["back"] = findViewById(R.id.btnBack)
orientationViews["left_angle"] = findViewById(R.id.btnLeftAngle)
orientationViews["right_angle"] = findViewById(R.id.btnRightAngle)
findViewById<Button>(R.id.btnNewCow).setOnClickListener {
if (checkStoragePermissions()) {
saveProfile()
} else {
requestStoragePermissions()
}
}
findViewById<Button>(R.id.btnCancel).setOnClickListener {
deleteSessionImages()
finish()
}
}
private fun loadInitialImages() {
val name = currentCowName ?: return
val cowFolder = StorageUtils.getCowImageFolder(name)
if (cowFolder.exists()) {
cowFolder.listFiles()?.forEach { file ->
if (file.isFile) {
initialImagePaths.add(file.absolutePath)
}
}
}
}
private fun deleteSessionImages() {
val name = currentCowName ?: return
val cowFolder = StorageUtils.getCowImageFolder(name)
if (cowFolder.exists()) {
cowFolder.listFiles()?.forEach { file ->
if (file.isFile && !initialImagePaths.contains(file.absolutePath)) {
file.delete()
}
}
val remaining = cowFolder.listFiles()
if (remaining == null || remaining.isEmpty()) {
cowFolder.delete()
}
}
}
private fun setupUIStrings() {
findViewById<TextView>(R.id.tvToolbarTitle).text = StringProvider.getString("title_cow_selection")
findViewById<TextView>(R.id.tvAddCowDetails).text = StringProvider.getString("title_add_cow_details")
// Using hint_ keys for the new labels as they contain the appropriate text (e.g. "Species", "Breed")
findViewById<TextView>(R.id.tvLabelSpecies).text = StringProvider.getString("hint_species")
findViewById<TextView>(R.id.tvLabelBreed).text = StringProvider.getString("hint_breed")
findViewById<TextView>(R.id.tvLabelAge).text = StringProvider.getString("hint_age")
findViewById<TextView>(R.id.tvLabelMilk).text = StringProvider.getString("hint_milk_yield")
findViewById<TextView>(R.id.tvLabelCalving).text = StringProvider.getString("hint_calving_number")
findViewById<TextView>(R.id.tvLabelDescription).text = StringProvider.getString("hint_description")
findViewById<TextInputLayout>(R.id.tilSpecies).hint = null
findViewById<TextInputLayout>(R.id.tilBreed).hint = null
findViewById<TextInputLayout>(R.id.tilAge).hint = null
findViewById<TextInputLayout>(R.id.tilMilk).hint = null
findViewById<TextInputLayout>(R.id.tilCalving).hint = null
findViewById<TextInputLayout>(R.id.tilDescription).hint = null
findViewById<TextView>(R.id.tvReproductiveStatus).text = StringProvider.getString("label_reproductive_status")
findViewById<RadioButton>(R.id.rbPregnant).text = StringProvider.getString("radio_pregnant")
findViewById<RadioButton>(R.id.rbCalved).text = StringProvider.getString("radio_calved")
findViewById<RadioButton>(R.id.rbNone).text = StringProvider.getString("radio_none")
findViewById<TextView>(R.id.tvUploadPhotos).text = StringProvider.getString("label_upload_photos")
findViewById<Button>(R.id.btnNewCow).text = StringProvider.getString("btn_save_profile")
findViewById<Button>(R.id.btnCancel).text = StringProvider.getString("btn_cancel")
}
private fun checkStoragePermissions(): Boolean {
return if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R) {
android.os.Environment.isExternalStorageManager()
} else {
val write = ContextCompat.checkSelfPermission(this, android.Manifest.permission.WRITE_EXTERNAL_STORAGE)
val read = ContextCompat.checkSelfPermission(this, android.Manifest.permission.READ_EXTERNAL_STORAGE)
write == PackageManager.PERMISSION_GRANTED && read == PackageManager.PERMISSION_GRANTED
}
}
private fun requestStoragePermissions() {
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.R) {
try {
val intent = Intent(android.provider.Settings.ACTION_MANAGE_APP_ALL_FILES_ACCESS_PERMISSION)
val uri = android.net.Uri.fromParts("package", packageName, null)
intent.data = uri
startActivity(intent)
} catch (_: Exception) {
val intent = Intent(android.provider.Settings.ACTION_MANAGE_ALL_FILES_ACCESS_PERMISSION)
startActivity(intent)
}
} else {
ActivityCompat.requestPermissions(
this,
arrayOf(android.Manifest.permission.WRITE_EXTERNAL_STORAGE, android.Manifest.permission.READ_EXTERNAL_STORAGE),
storagePermissionCode
)
}
}
private fun saveProfile() {
val speciesDisplay = findViewById<AutoCompleteTextView>(R.id.spinnerSpecies).text.toString()
val breedDisplay = findViewById<AutoCompleteTextView>(R.id.spinnerBreed).text.toString()
val speciesKey = StringProvider.getKeyForValue(speciesDisplay)
val species = if (speciesKey != null) StringProvider.getStringEnglish(speciesKey) else speciesDisplay
val breedKey = StringProvider.getKeyForValue(breedDisplay)
val breed = if (breedKey != null) StringProvider.getStringEnglish(breedKey) else breedDisplay
val ageInput = findViewById<TextInputEditText>(R.id.etAge).text.toString()
val milkInput = findViewById<TextInputLayout>(R.id.tilMilk).editText?.text.toString()
val calvingInput = findViewById<TextInputLayout>(R.id.tilCalving).editText?.text.toString()
val descriptionInput = findViewById<TextInputLayout>(R.id.tilDescription).editText?.text.toString()
val rgReproductive = findViewById<RadioGroup>(R.id.rgReproductiveStatus)
val reproductiveStatusKey = when(rgReproductive?.checkedRadioButtonId) {
R.id.rbPregnant -> "radio_pregnant"
R.id.rbCalved -> "radio_calved"
R.id.rbNone -> "radio_none"
else -> null
}
val reproductiveStatus = if (reproductiveStatusKey != null) StringProvider.getStringEnglish(reproductiveStatusKey) else ""
val csvHeader = "CowID,Species,Breed,Age,MilkYield,CalvingNumber,ReproductiveStatus,Description\n"
val csvRow = "$currentCowName,$species,$breed,$ageInput,$milkInput,$calvingInput,$reproductiveStatus,$descriptionInput\n"
val docsFolder = StorageUtils.getDocumentsFolder()
val csvFile = File(docsFolder, "cow_profiles.csv")
try {
val fileExists = csvFile.exists()
val lines = if (fileExists) csvFile.readLines().toMutableList() else mutableListOf()
if (!fileExists) {
lines.add(csvHeader.trim())
}
val existingIndex = lines.indexOfFirst { it.startsWith("$currentCowName,") }
if (existingIndex != -1) {
lines[existingIndex] = csvRow.trim()
} else {
lines.add(csvRow.trim())
}
FileWriter(csvFile).use { writer ->
lines.forEach { line ->
writer.write(line + "\n")
}
}
Toast.makeText(this, StringProvider.getString("toast_profile_saved"), Toast.LENGTH_SHORT).show()
finish()
} catch (e: Exception) {
e.printStackTrace()
Toast.makeText(this, StringProvider.getString("toast_error_saving_profile") + " ${e.message}", Toast.LENGTH_SHORT).show()
}
}
private fun loadCowDetails(cowId: String) {
val docsFolder = StorageUtils.getDocumentsFolder()
val csvFile = File(docsFolder, "cow_profiles.csv")
if (!csvFile.exists()) return
try {
val lines = csvFile.readLines()
val record = lines.find { it.startsWith("$cowId,") }?.split(",") ?: return
if (record.size >= 8) {
val storedSpecies = record[1]
val speciesKey = StringProvider.getKeyForEnglishValue(storedSpecies)
val displaySpecies = if (speciesKey != null) StringProvider.getString(speciesKey) else storedSpecies
findViewById<AutoCompleteTextView>(R.id.spinnerSpecies).setText(displaySpecies, false)
val storedBreed = record[2]
val breedKey = StringProvider.getKeyForEnglishValue(storedBreed)
val displayBreed = if (breedKey != null) StringProvider.getString(breedKey) else storedBreed
findViewById<AutoCompleteTextView>(R.id.spinnerBreed).setText(displayBreed, false)
findViewById<TextInputEditText>(R.id.etAge).setText(record[3])
findViewById<TextInputLayout>(R.id.tilMilk).editText?.setText(record[4])
findViewById<TextInputLayout>(R.id.tilCalving).editText?.setText(record[5])
val storedStatus = record[6]
val statusKey = StringProvider.getKeyForEnglishValue(storedStatus)
when(statusKey) {
"radio_pregnant" -> findViewById<RadioButton>(R.id.rbPregnant).isChecked = true
"radio_calved" -> findViewById<RadioButton>(R.id.rbCalved).isChecked = true
"radio_none" -> findViewById<RadioButton>(R.id.rbNone).isChecked = true
else -> {
when (storedStatus) {
"Pregnant" -> findViewById<RadioButton>(R.id.rbPregnant).isChecked = true
"Calved" -> findViewById<RadioButton>(R.id.rbCalved).isChecked = true
"None" -> findViewById<RadioButton>(R.id.rbNone).isChecked = true
}
}
}
findViewById<TextInputLayout>(R.id.tilDescription).editText?.setText(record[7])
}
} catch (e: Exception) {
e.printStackTrace()
}
}
private fun setupDropdowns() {
val species = listOf(
StringProvider.getString("species_cow"),
StringProvider.getString("species_buffalo")
)
val speciesAdapter = ArrayAdapter(this, android.R.layout.simple_dropdown_item_1line, species)
findViewById<AutoCompleteTextView>(R.id.spinnerSpecies).setAdapter(speciesAdapter)
val breeds = listOf(
StringProvider.getString("breed_holstein"),
StringProvider.getString("breed_jersey"),
StringProvider.getString("breed_sahiwal"),
StringProvider.getString("breed_gir"),
StringProvider.getString("breed_red_sindhi"),
StringProvider.getString("breed_murrah"),
StringProvider.getString("breed_surti")
)
val breedAdapter = ArrayAdapter(this, android.R.layout.simple_dropdown_item_1line, breeds)
findViewById<AutoCompleteTextView>(R.id.spinnerBreed).setAdapter(breedAdapter)
}
override fun onResume() {
super.onResume()
refreshCowImages()
}
private fun initializeDefaultMasks() {
val orientationResources = mapOf(
"left" to R.drawable.left,
"right" to R.drawable.right,
"angle" to R.drawable.angle,
"front" to R.drawable.front,
"back" to R.drawable.back,
"leftangle" to R.drawable.leftangle,
"rightangle" to R.drawable.rightangle
)
orientationResources.forEach { (orientation, resId) ->
val filename = "${orientation}_mask.png"
val file = File(filesDir, filename)
if (!file.exists()) {
try {
val original = BitmapFactory.decodeResource(resources, resId)
if (original != null) {
val inverted = createInverseBitmask(original)
FileOutputStream(file).use { out ->
inverted.compress(Bitmap.CompressFormat.PNG, 100, out)
}
}
} catch (e: Exception) {
e.printStackTrace()
}
}
}
}
private fun createInverseBitmask(src: Bitmap): Bitmap {
val width = src.width
val height = src.height
val pixels = IntArray(width * height)
src.getPixels(pixels, 0, width, 0, 0, width, height)
for (i in pixels.indices) {
val alpha = (pixels[i] shr 24) and 0xFF
if (alpha > 0) {
pixels[i] = Color.TRANSPARENT
} else {
pixels[i] = Color.BLACK
}
}
return Bitmap.createBitmap(pixels, width, height, Bitmap.Config.ARGB_8888)
}
private fun generateNewCowName() {
val sdf = SimpleDateFormat("yyyyMMddHHmmss", Locale.getDefault())
currentCowName = "cow_${sdf.format(Date())}"
}
private fun refreshCowImages() {
val name = currentCowName ?: return
val orientations = mapOf(
"left" to Pair(R.drawable.left, R.id.btnLeft),
"right" to Pair(R.drawable.right, R.id.btnRight),
"angle" to Pair(R.drawable.angle, R.id.btnTop),
"front" to Pair(R.drawable.front, R.id.btnFront),
"back" to Pair(R.drawable.back, R.id.btnBack),
"leftangle" to Pair(R.drawable.leftangle, R.id.btnLeftAngle),
"rightangle" to Pair(R.drawable.rightangle, R.id.btnRightAngle)
)
val cowImagesFolder = StorageUtils.getCowImageFolder(name)
orientations.forEach { (orientation, pair) ->
val (drawableId, viewId) = pair
val files = if (cowImagesFolder.exists()) {
cowImagesFolder.listFiles { _, fname -> fname.startsWith("${name}_${orientation}_") && fname.endsWith(".jpg") }
} else {
null
}
val latestFile = files?.maxByOrNull { it.lastModified() }
val container = findViewById<LinearLayout>(viewId)
container.removeAllViews()
val key = when(orientation) {
"front" -> "text_front_view"
"back" -> "text_rear_view"
"left" -> "text_left_side"
"right" -> "text_right_side"
"angle" -> "text_angle_view"
"leftangle" -> "text_left_angle"
"rightangle" -> "text_right_angle"
else -> ""
}
val label = if (key.isNotEmpty()) StringProvider.getString(key) else orientation
if (latestFile != null && latestFile.exists()) {
val frameLayout = android.widget.FrameLayout(this)
frameLayout.layoutParams = LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.MATCH_PARENT
)
val imageView = ImageView(this)
imageView.layoutParams = android.widget.FrameLayout.LayoutParams(
android.widget.FrameLayout.LayoutParams.MATCH_PARENT,
android.widget.FrameLayout.LayoutParams.MATCH_PARENT
)
imageView.scaleType = ImageView.ScaleType.CENTER_CROP
val bitmap = BitmapFactory.decodeFile(latestFile.absolutePath)
imageView.setImageBitmap(bitmap)
val deleteBtn = ImageView(this)
val btnSize = (24 * resources.displayMetrics.density).toInt()
val btnParams = android.widget.FrameLayout.LayoutParams(btnSize, btnSize)
btnParams.gravity = android.view.Gravity.TOP or android.view.Gravity.END
val margin = (4 * resources.displayMetrics.density).toInt()
btnParams.setMargins(margin, margin, margin, margin)
deleteBtn.layoutParams = btnParams
deleteBtn.setImageResource(android.R.drawable.ic_menu_close_clear_cancel)
deleteBtn.setColorFilter(Color.RED)
deleteBtn.setBackgroundColor(Color.parseColor("#80FFFFFF"))
deleteBtn.isClickable = true
deleteBtn.setOnClickListener {
if (latestFile.delete()) {
val remainingFiles = cowImagesFolder.listFiles()
if (remainingFiles == null || remainingFiles.isEmpty()) {
cowImagesFolder.delete()
}
refreshCowImages()
}
}
val labelView = TextView(this)
labelView.text = label
labelView.textSize = 12f
labelView.setTextColor(Color.WHITE)
labelView.setShadowLayer(3f, 0f, 0f, Color.BLACK)
val labelParams = android.widget.FrameLayout.LayoutParams(
android.widget.FrameLayout.LayoutParams.WRAP_CONTENT,
android.widget.FrameLayout.LayoutParams.WRAP_CONTENT
)
labelParams.gravity = android.view.Gravity.BOTTOM or android.view.Gravity.CENTER_HORIZONTAL
labelParams.bottomMargin = (4 * resources.displayMetrics.density).toInt()
labelView.layoutParams = labelParams
frameLayout.addView(imageView)
frameLayout.addView(labelView)
frameLayout.addView(deleteBtn)
container.addView(frameLayout)
imageView.setOnClickListener {
val intent = Intent(this, FullScreenImageActivity::class.java)
intent.putExtra("IMAGE_PATH", latestFile.absolutePath)
intent.putExtra("ALLOW_RETAKE", true)
intent.putExtra("COW_NAME", currentCowName)
intent.putExtra("ORIENTATION", orientation)
intent.putExtra("SILHOUETTE_ID", drawableId)
startActivity(intent)
}
container.setOnClickListener(null)
container.isClickable = false
} else {
val iconView = ImageView(this)
val params = LinearLayout.LayoutParams(
(24 * resources.displayMetrics.density).toInt(),
(24 * resources.displayMetrics.density).toInt()
)
params.gravity = android.view.Gravity.CENTER_HORIZONTAL
iconView.layoutParams = params
iconView.setImageResource(android.R.drawable.ic_menu_camera)
iconView.setColorFilter(Color.parseColor("#5D4037"))
val textView = TextView(this)
val textParams = LinearLayout.LayoutParams(
LinearLayout.LayoutParams.WRAP_CONTENT,
LinearLayout.LayoutParams.WRAP_CONTENT
)
textParams.gravity = android.view.Gravity.CENTER_HORIZONTAL
textParams.topMargin = (4 * resources.displayMetrics.density).toInt()
textView.layoutParams = textParams
textView.text = label
textView.textSize = 12f
textView.setTextColor(Color.parseColor("#5D4037"))
container.addView(iconView)
container.addView(textView)
container.setOnClickListener {
val intent = Intent(this, CameraProcessor::class.java)
intent.putExtra("SILHOUETTE_ID", drawableId)
intent.putExtra("COW_NAME", currentCowName)
intent.putExtra("ORIENTATION", orientation)
startActivity(intent)
}
container.isClickable = true
}
}
}
override fun onSaveInstanceState(outState: Bundle) {
super.onSaveInstanceState(outState)
outState.putString("COW_NAME", currentCowName)
}
}

View File

@@ -1,273 +0,0 @@
package com.example.animalrating
import android.graphics.Bitmap
import android.graphics.Color
import android.graphics.Matrix
import android.util.Log
import androidx.camera.core.ExperimentalGetImage
import androidx.camera.core.ImageProxy
import com.google.android.gms.tasks.Task
import com.google.android.gms.tasks.TaskCompletionSource
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.segmentation.subject.SubjectSegmentation
import com.google.mlkit.vision.segmentation.subject.SubjectSegmenterOptions
import java.util.concurrent.Executors
import java.util.concurrent.atomic.AtomicBoolean
import kotlin.math.sqrt
data class SegmentationResult(
val mask: Bitmap?,
val isMatch: Boolean
)
class FrameProcessor {
private val isProcessing = AtomicBoolean(false)
private val processingExecutor = Executors.newSingleThreadExecutor()
private val options = SubjectSegmenterOptions.Builder()
.enableMultipleSubjects(
SubjectSegmenterOptions.SubjectResultOptions.Builder()
.enableConfidenceMask()
.build()
)
.build()
private val segmenter = SubjectSegmentation.getClient(options)
@ExperimentalGetImage
fun processFrame(
imageProxy: ImageProxy,
savedMask: Bitmap?,
isPortrait: Boolean,
thresholdPercent: Int = 75,
algorithm: String = HomeActivity.ALGORITHM_HAMMING
): Task<SegmentationResult> {
val taskCompletionSource = TaskCompletionSource<SegmentationResult>()
if (!isProcessing.compareAndSet(false, true)) {
imageProxy.close()
taskCompletionSource.setResult(SegmentationResult(null, false))
return taskCompletionSource.task
}
val mediaImage = imageProxy.image
if (mediaImage == null) {
isProcessing.set(false)
imageProxy.close()
taskCompletionSource.setResult(SegmentationResult(null, false))
return taskCompletionSource.task
}
val inputImage = InputImage.fromMediaImage(mediaImage, 0)
segmenter.process(inputImage)
.addOnSuccessListener(processingExecutor) { result ->
var bitmapMask: Bitmap? = null
val subject = result.subjects.firstOrNull()
val mask = subject?.confidenceMask
if (mask != null) {
val startX = subject.startX
val startY = subject.startY
val maskWidth = subject.width
val maskHeight = subject.height
val fullWidth = inputImage.width
val fullHeight = inputImage.height
if (mask.remaining() >= maskWidth * maskHeight) {
val colors = IntArray(fullWidth * fullHeight)
mask.rewind()
for (y in 0 until maskHeight) {
for (x in 0 until maskWidth) {
if (mask.get() > 0.5f) {
val destX = startX + x
val destY = startY + y
if (destX < fullWidth && destY < fullHeight) {
colors[destY * fullWidth + destX] = Color.argb(180, 255, 0, 255)
}
}
}
}
val rawBitmap = Bitmap.createBitmap(colors, fullWidth, fullHeight, Bitmap.Config.ARGB_8888)
// Rotate and Scale if needed
bitmapMask = if (isPortrait) {
try {
val matrix = Matrix()
// Rotate 90 degrees
matrix.postRotate(90f)
// The mask geometry is trusted as-is; if masks are consistently off,
// an extra scale step (e.g. matrix.postScale(0.9f, 0.9f)) could compensate.
// For now only the 90-degree rotation is applied.
Bitmap.createBitmap(rawBitmap, 0, 0, rawBitmap.width, rawBitmap.height, matrix, true)
} catch (e: Exception) {
Log.e("FrameProcessor", "Error rotating mask", e)
rawBitmap
}
} else {
rawBitmap
}
} else {
Log.e("FrameProcessor", "Mask buffer size mismatch")
}
}
// Calculate match
var isMatch = false
if (bitmapMask != null && savedMask != null) {
// Scale current mask to match saved mask dimensions (640x480) for comparison
val comparisonMask = if (bitmapMask.width != savedMask.width || bitmapMask.height != savedMask.height) {
try {
Bitmap.createScaledBitmap(bitmapMask, savedMask.width, savedMask.height, true)
} catch (e: Exception) {
Log.e("FrameProcessor", "Error scaling mask for comparison", e)
null
}
} else {
bitmapMask
}
if (comparisonMask != null) {
// Map display strings to internal algorithm keys/logic
val algoKey = when(algorithm) {
StringProvider.getString("algo_euclidean") -> HomeActivity.ALGORITHM_EUCLIDEAN
StringProvider.getString("algo_jaccard") -> HomeActivity.ALGORITHM_JACCARD
else -> {
if (algorithm == HomeActivity.ALGORITHM_EUCLIDEAN || algorithm == HomeActivity.ALGORITHM_JACCARD) algorithm
else HomeActivity.ALGORITHM_HAMMING
}
}
isMatch = when (algoKey) {
HomeActivity.ALGORITHM_EUCLIDEAN -> calculateEuclideanDistance(savedMask, comparisonMask, thresholdPercent)
HomeActivity.ALGORITHM_JACCARD -> calculateJaccardSimilarity(savedMask, comparisonMask, thresholdPercent)
else -> calculateHammingDistance(savedMask, comparisonMask, thresholdPercent)
}
}
}
taskCompletionSource.setResult(SegmentationResult(bitmapMask, isMatch))
}
.addOnFailureListener { e ->
Log.e("FrameProcessor", "Subject Segmentation failed", e)
taskCompletionSource.setException(e)
}
.addOnCompleteListener { _ ->
isProcessing.set(false)
imageProxy.close()
}
return taskCompletionSource.task
}
private fun calculateHammingDistance(mask1: Bitmap, mask2: Bitmap, thresholdPercent: Int): Boolean {
if (mask1.width != mask2.width || mask1.height != mask2.height) {
return false
}
val width = mask1.width
val height = mask1.height
val pixels1 = IntArray(width * height)
val pixels2 = IntArray(width * height)
mask1.getPixels(pixels1, 0, width, 0, 0, width, height)
mask2.getPixels(pixels2, 0, width, 0, 0, width, height)
var distance = 0
for (i in pixels1.indices) {
val isSet1 = (pixels1[i] ushr 24) > 0
val isSet2 = (pixels2[i] ushr 24) > 0
if (isSet1 != isSet2) {
distance++
}
}
val totalPixels = width * height
val validThreshold = thresholdPercent.coerceIn(1, 100)
val allowedDistance = (totalPixels.toLong() * (100 - validThreshold)) / 100
return distance <= allowedDistance
}
private fun calculateEuclideanDistance(mask1: Bitmap, mask2: Bitmap, thresholdPercent: Int): Boolean {
if (mask1.width != mask2.width || mask1.height != mask2.height) {
return false
}
val width = mask1.width
val height = mask1.height
val pixels1 = IntArray(width * height)
val pixels2 = IntArray(width * height)
mask1.getPixels(pixels1, 0, width, 0, 0, width, height)
mask2.getPixels(pixels2, 0, width, 0, 0, width, height)
var sumSq = 0L
for (i in pixels1.indices) {
// Simple binary comparison for Euclidean distance on masks
// Treat existence of pixel as 255, non-existence as 0
val val1 = if ((pixels1[i] ushr 24) > 0) 255 else 0
val val2 = if ((pixels2[i] ushr 24) > 0) 255 else 0
val diff = val1 - val2
sumSq += diff * diff
}
val euclideanDistance = sqrt(sumSq.toDouble())
val maxDistance = sqrt((width * height).toDouble()) * 255.0
val validThreshold = thresholdPercent.coerceIn(1, 100)
val allowedDistance = maxDistance * (100 - validThreshold) / 100.0
return euclideanDistance <= allowedDistance
}
private fun calculateJaccardSimilarity(mask1: Bitmap, mask2: Bitmap, thresholdPercent: Int): Boolean {
if (mask1.width != mask2.width || mask1.height != mask2.height) {
return false
}
val width = mask1.width
val height = mask1.height
val pixels1 = IntArray(width * height)
val pixels2 = IntArray(width * height)
mask1.getPixels(pixels1, 0, width, 0, 0, width, height)
mask2.getPixels(pixels2, 0, width, 0, 0, width, height)
var intersection = 0
var union = 0
for (i in pixels1.indices) {
// mask1 is the saved mask, likely stored inverted by StorageUtils
// (subject transparent/0, background black/255); see CameraProcessor.loadSavedMask() -> decodeFile().
// Under that convention, alpha < 128 marks a subject pixel.
val alpha1 = (pixels1[i] ushr 24) and 0xFF
val isSubject1 = alpha1 < 128
// mask2 is Live Mask (Subject is Magenta/180 alpha)
val alpha2 = (pixels2[i] ushr 24) and 0xFF
val isSubject2 = alpha2 > 0
if (isSubject1 && isSubject2) {
intersection++
}
if (isSubject1 || isSubject2) {
union++
}
}
if (union == 0) return false
val jaccardIndex = (intersection.toDouble() / union.toDouble()) * 100
return jaccardIndex >= thresholdPercent
}
}

View File

@@ -1,62 +0,0 @@
package com.example.animalrating
import android.content.Intent
import android.graphics.BitmapFactory
import android.os.Bundle
import android.view.View
import android.widget.Button
import android.widget.ImageButton
import android.widget.ImageView
import androidx.appcompat.app.AppCompatActivity
import java.io.File
class FullScreenImageActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_full_screen_image)
StringProvider.initialize(this)
val imagePath = intent.getStringExtra("IMAGE_PATH")
val allowRetake = intent.getBooleanExtra("ALLOW_RETAKE", false)
val cowName = intent.getStringExtra("COW_NAME")
val orientation = intent.getStringExtra("ORIENTATION")
val silhouetteId = intent.getIntExtra("SILHOUETTE_ID", 0)
if (imagePath != null) {
val file = File(imagePath)
if (file.exists()) {
val bitmap = BitmapFactory.decodeFile(file.absolutePath)
findViewById<ImageView>(R.id.fullScreenImageView).setImageBitmap(bitmap)
}
}
val btnRetake = findViewById<Button>(R.id.btnRetake)
if (allowRetake && imagePath != null) {
btnRetake.visibility = View.VISIBLE
btnRetake.text = "Retake Photo"
btnRetake.setOnClickListener {
// Launch camera to retake. Pass the current image path so CameraProcessor can overwrite/delete it on success.
val intent = Intent(this, CameraProcessor::class.java)
intent.putExtra("SILHOUETTE_ID", silhouetteId)
intent.putExtra("COW_NAME", cowName)
intent.putExtra("ORIENTATION", orientation)
intent.putExtra("RETAKE_IMAGE_PATH", imagePath) // Pass image path to replace
startActivity(intent)
finish()
}
} else {
btnRetake.visibility = View.GONE
}
val btnBack = findViewById<ImageButton>(R.id.btnBack)
btnBack.contentDescription = StringProvider.getString("content_desc_back")
btnBack.setOnClickListener {
finish()
}
findViewById<ImageView>(R.id.fullScreenImageView).contentDescription = StringProvider.getString("content_desc_full_screen_image")
}
}

View File

@@ -1,400 +0,0 @@
package com.example.animalrating
import android.content.Intent
import android.content.res.ColorStateList
import android.graphics.BitmapFactory
import android.os.Bundle
import android.view.Gravity
import android.widget.ImageView
import android.widget.LinearLayout
import android.widget.TextView
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import androidx.cardview.widget.CardView
import com.google.android.material.button.MaterialButton
import com.google.android.material.floatingactionbutton.FloatingActionButton
import java.io.File
import java.io.FileWriter
import java.util.Locale
class GalleryActivity : AppCompatActivity() {
private lateinit var container: LinearLayout
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_gallery)
StringProvider.initialize(this)
val toolbar = findViewById<androidx.appcompat.widget.Toolbar>(R.id.toolbar)
setSupportActionBar(toolbar)
supportActionBar?.setDisplayHomeAsUpEnabled(true)
supportActionBar?.setDisplayShowHomeEnabled(true)
supportActionBar?.setDisplayShowTitleEnabled(false)
findViewById<TextView>(R.id.tvToolbarTitle)?.text = StringProvider.getString("title_gallery")
toolbar.setNavigationOnClickListener {
finish()
}
container = findViewById(R.id.galleryContainer)
findViewById<FloatingActionButton>(R.id.fabAddCow).setOnClickListener {
val intent = Intent(this, CowSelectionActivity::class.java)
startActivity(intent)
}
refreshGallery()
}
private fun refreshGallery() {
container.removeAllViews()
val imagesBaseFolder = StorageUtils.getImagesBaseFolder()
val cowFolders = imagesBaseFolder.listFiles { file -> file.isDirectory } ?: emptyArray()
val cowNamesFromFolders = cowFolders.map { it.name }
val docsFolder = StorageUtils.getDocumentsFolder()
val csvFile = File(docsFolder, "cow_profiles.csv")
val cowDetails = if (csvFile.exists()) {
csvFile.readLines().associate { line ->
val parts = line.split(",")
if (parts.isNotEmpty()) parts[0] to parts else "" to emptyList()
}
} else {
emptyMap()
}
val allCowNames = (cowDetails.keys + cowNamesFromFolders).filter { it.isNotEmpty() && it != "CowID" }.distinct()
allCowNames.forEach { cowName ->
val details = cowDetails[cowName] ?: emptyList()
val cowImageFolder = StorageUtils.getCowImageFolder(cowName)
val cowFiles = cowImageFolder.listFiles { _, name -> name.endsWith(".jpg") }?.toList() ?: emptyList()
addCowSection(cowName, cowFiles, details)
}
}
private fun addCowSection(cowName: String, cowFiles: List<File>, details: List<String>) {
// Main Card
val card = CardView(this).apply {
layoutParams = LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.WRAP_CONTENT
).apply {
setMargins(0, 0, 0, 24)
}
radius = 16 * resources.displayMetrics.density
cardElevation = 2 * resources.displayMetrics.density
setCardBackgroundColor(android.graphics.Color.WHITE)
}
// Horizontal Container (3:1 split)
val horizontalContainer = LinearLayout(this).apply {
orientation = LinearLayout.HORIZONTAL
layoutParams = LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.WRAP_CONTENT
)
weightSum = 4f
setPadding(24, 24, 24, 24)
}
// Left Layout (Info) - Weight 3
val leftLayout = LinearLayout(this).apply {
orientation = LinearLayout.VERTICAL
layoutParams = LinearLayout.LayoutParams(
0,
LinearLayout.LayoutParams.WRAP_CONTENT,
3f
).apply {
marginEnd = 16
}
}
val nameView = TextView(this).apply {
text = if (details.isNotEmpty()) "${StringProvider.getString("text_cow_id")} $cowName" else cowName
textSize = 20f
setTypeface(null, android.graphics.Typeface.BOLD)
setTextColor(android.graphics.Color.parseColor("#3E2723"))
layoutParams = LinearLayout.LayoutParams(
LinearLayout.LayoutParams.WRAP_CONTENT,
LinearLayout.LayoutParams.WRAP_CONTENT
).apply {
bottomMargin = 8
}
}
leftLayout.addView(nameView)
if (details.size >= 7) {
// Translate values for display
val storedSpecies = details.getOrElse(1) { "-" }
val speciesKey = StringProvider.getKeyForEnglishValue(storedSpecies)
val displaySpecies = if (speciesKey != null) StringProvider.getString(speciesKey) else storedSpecies
val storedBreed = details.getOrElse(2) { "-" }
val breedKey = StringProvider.getKeyForEnglishValue(storedBreed)
val displayBreed = if (breedKey != null) StringProvider.getString(breedKey) else storedBreed
val storedStatus = details.getOrElse(6) { "-" }
val statusKey = StringProvider.getKeyForEnglishValue(storedStatus)
val displayStatus = if (statusKey != null) StringProvider.getString(statusKey) else storedStatus
val infoText = StringBuilder()
infoText.append("${StringProvider.getString("label_species")} $displaySpecies ")
infoText.append("${StringProvider.getString("label_breed")} $displayBreed\n")
infoText.append("${StringProvider.getString("label_age")} ${details.getOrElse(3) { "-" }} ${StringProvider.getString("unit_years")} ")
infoText.append("${StringProvider.getString("label_milk_yield")} ${details.getOrElse(4) { "-" }} ${StringProvider.getString("unit_liters")}\n")
infoText.append("${StringProvider.getString("label_calving_no")} ${details.getOrElse(5) { "-" }} ")
infoText.append("${StringProvider.getString("label_status")} $displayStatus")
val detailsView = TextView(this).apply {
text = infoText.toString()
textSize = 14f
setTextColor(android.graphics.Color.parseColor("#5D4037"))
setLineSpacing(10f, 1f)
}
leftLayout.addView(detailsView)
} else {
val detailsView = TextView(this).apply {
text = "No details available"
textSize = 14f
setTextColor(android.graphics.Color.parseColor("#5D4037"))
}
leftLayout.addView(detailsView)
}
horizontalContainer.addView(leftLayout)
// Right Layout (Buttons) - Weight 1
val rightLayout = LinearLayout(this).apply {
orientation = LinearLayout.VERTICAL
layoutParams = LinearLayout.LayoutParams(
0,
LinearLayout.LayoutParams.WRAP_CONTENT,
1f
)
gravity = Gravity.CENTER_VERTICAL
}
val buttonParams = LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT,
(48 * resources.displayMetrics.density).toInt()
).apply {
bottomMargin = (8 * resources.displayMetrics.density).toInt()
}
val editButton = MaterialButton(this).apply {
text = StringProvider.getString("btn_edit")
textSize = 12f
setTypeface(null, android.graphics.Typeface.BOLD)
setTextColor(android.graphics.Color.parseColor("#5D4037"))
cornerRadius = (12 * resources.displayMetrics.density).toInt()
backgroundTintList = ColorStateList.valueOf(android.graphics.Color.parseColor("#EFEBE9"))
strokeWidth = (1 * resources.displayMetrics.density).toInt()
strokeColor = ColorStateList.valueOf(android.graphics.Color.parseColor("#5D4037"))
layoutParams = buttonParams
insetTop = 0
insetBottom = 0
minHeight = 0
minimumHeight = 0
setOnClickListener {
val intent = Intent(this@GalleryActivity, CowSelectionActivity::class.java)
intent.putExtra("COW_NAME", cowName)
startActivity(intent)
}
}
val rateButton = MaterialButton(this).apply {
text = "Rate" // Add string if needed, skipping as per instruction to be concise unless requested
textSize = 12f
setTypeface(null, android.graphics.Typeface.BOLD)
setTextColor(android.graphics.Color.WHITE)
cornerRadius = (12 * resources.displayMetrics.density).toInt()
backgroundTintList = ColorStateList.valueOf(android.graphics.Color.parseColor("#6D4C41"))
layoutParams = buttonParams
insetTop = 0
insetBottom = 0
minHeight = 0
minimumHeight = 0
setOnClickListener {
val intent = Intent(this@GalleryActivity, RatingActivity::class.java)
intent.putExtra("COW_NAME", cowName)
startActivity(intent)
}
}
val deleteButton = MaterialButton(this).apply {
text = "Delete"
textSize = 12f
setTypeface(null, android.graphics.Typeface.BOLD)
setTextColor(android.graphics.Color.WHITE)
cornerRadius = (12 * resources.displayMetrics.density).toInt()
backgroundTintList = ColorStateList.valueOf(android.graphics.Color.RED)
layoutParams = buttonParams
insetTop = 0
insetBottom = 0
minHeight = 0
minimumHeight = 0
setOnClickListener {
deleteCow(cowName)
}
}
rightLayout.addView(editButton)
rightLayout.addView(rateButton)
rightLayout.addView(deleteButton)
horizontalContainer.addView(rightLayout)
card.addView(horizontalContainer)
container.addView(card)
// Orientation Images
if (cowFiles.isNotEmpty()) {
// Instead of separating by orientation, put all images in a grid
val gridLayout = android.widget.GridLayout(this).apply {
columnCount = 3
layoutParams = LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.WRAP_CONTENT
).apply {
setMargins(16, 0, 16, 16)
}
}
// Lay the images out three per row, preserving file order.
cowFiles.forEach { file ->
val thumbnailView = layoutInflater.inflate(R.layout.item_image_thumbnail, gridLayout, false)
val imageView = thumbnailView.findViewById<ImageView>(R.id.ivThumbnail)
val labelView = thumbnailView.findViewById<TextView>(R.id.tvOrientationLabel)
val deleteButtonSmall = thumbnailView.findViewById<android.view.View>(R.id.btnDelete)
// Optionally set layout params for thumbnailView to ensure 3 per row
val displayMetrics = resources.displayMetrics
val screenWidth = displayMetrics.widthPixels
val itemWidth = (screenWidth - (48 * displayMetrics.density).toInt()) / 3
thumbnailView.layoutParams = android.widget.GridLayout.LayoutParams().apply {
width = itemWidth
height = itemWidth // Square
setMargins(4, 4, 4, 4)
}
// Extract orientation from filename
val parts = file.name.split("_")
var orientation = ""
if (parts.size >= 3) {
// Filenames are written by CowSelectionActivity as <cowName>_<orientation>_<timestamp>.jpg,
// so look for a known orientation token delimited by underscores.
val orientations = listOf("left", "right", "angle", "front", "back", "leftangle", "rightangle")
orientation = orientations.find { file.name.contains("_${it}_") } ?: ""
}
val key = when(orientation) {
"front" -> "text_front_view"
"back" -> "text_rear_view"
"left" -> "text_left_side"
"right" -> "text_right_side"
"angle" -> "text_angle_view"
"leftangle" -> "text_left_angle"
"rightangle" -> "text_right_angle"
else -> ""
}
val label = if (key.isNotEmpty()) StringProvider.getString(key) else orientation
labelView.text = label
imageView.setImageBitmap(BitmapFactory.decodeFile(file.absolutePath))
imageView.scaleType = ImageView.ScaleType.CENTER_CROP
imageView.setOnClickListener {
val intent = Intent(this@GalleryActivity, FullScreenImageActivity::class.java)
intent.putExtra("IMAGE_PATH", file.absolutePath)
startActivity(intent)
}
deleteButtonSmall.setOnClickListener {
if (file.delete()) {
val parentDir = file.parentFile
if (parentDir != null && parentDir.exists()) {
val remaining = parentDir.listFiles()
if (remaining == null || remaining.isEmpty()) {
parentDir.delete()
}
}
Toast.makeText(this@GalleryActivity, StringProvider.getString("toast_image_deleted"), Toast.LENGTH_SHORT).show()
refreshGallery()
} else {
Toast.makeText(this@GalleryActivity, StringProvider.getString("toast_error_deleting_image"), Toast.LENGTH_SHORT).show()
}
}
gridLayout.addView(thumbnailView)
}
container.addView(gridLayout)
val separator = android.view.View(this).apply {
layoutParams = LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT,
2
).apply {
setMargins(0, 16, 0, 16)
}
setBackgroundColor(android.graphics.Color.LTGRAY)
}
container.addView(separator)
}
}
private fun deleteCow(cowName: String) {
// Delete Images
val imageFolder = StorageUtils.getCowImageFolder(cowName)
if (imageFolder.exists()) {
imageFolder.deleteRecursively()
}
val docsFolder = StorageUtils.getDocumentsFolder()
// Delete Profile
val profileFile = File(docsFolder, "cow_profiles.csv")
if (profileFile.exists()) {
val lines = profileFile.readLines()
val newLines = lines.filter { !it.startsWith("$cowName,") }
FileWriter(profileFile).use { writer ->
newLines.forEach { writer.write(it + "\n") }
}
}
// Delete Ratings
val ratingsFile = File(docsFolder, "cow_ratings.csv")
if (ratingsFile.exists()) {
val lines = ratingsFile.readLines()
val newLines = lines.filter { !it.startsWith("$cowName,") }
FileWriter(ratingsFile).use { writer ->
newLines.forEach { writer.write(it + "\n") }
}
}
Toast.makeText(this, "Cow profile deleted", Toast.LENGTH_SHORT).show()
refreshGallery()
}
override fun onResume() {
super.onResume()
refreshGallery()
}
}
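For reference, addCowSection above (and RatingActivity.loadCowDetails further down) index cow_profiles.csv rows by position: 0 = CowID, 1 = species, 2 = breed, 3 = age, 4 = milk yield, 5 = calving number, 6 = status, 7 = description. A minimal sketch of that parsing, using a hypothetical row:
// Hypothetical row; the real file is written elsewhere in the app.
val row = "COW_001,Cow,Holstein,4,18,2,Lactating,Docile animal"
val details = row.split(",")
val species = details[1]      // "Cow" -> translated for display via StringProvider.getKeyForEnglishValue
val status = details[6]       // "Lactating"
val description = details[7]  // "Docile animal" (only shown by RatingActivity)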

View File

@ -1,336 +0,0 @@
package com.example.animalrating
import android.content.Intent
import android.content.pm.PackageManager
import android.content.res.ColorStateList
import android.graphics.Color
import android.os.Build
import android.os.Bundle
import android.view.View
import android.widget.AdapterView
import android.widget.ArrayAdapter
import android.widget.ImageButton
import android.widget.ImageView
import android.widget.PopupMenu
import android.widget.SeekBar
import android.widget.Spinner
import android.widget.TextView
import androidx.appcompat.app.AppCompatActivity
import androidx.cardview.widget.CardView
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import androidx.core.view.GravityCompat
import androidx.drawerlayout.widget.DrawerLayout
import com.google.android.material.button.MaterialButton
import com.google.android.material.switchmaterial.SwitchMaterial
class HomeActivity : AppCompatActivity() {
companion object {
const val ALGORITHM_HAMMING = "Hamming Distance"
const val ALGORITHM_EUCLIDEAN = "Euclidean Distance"
const val ALGORITHM_JACCARD = "Jaccard Similarity"
private const val PERMISSION_REQUEST_CODE = 101
private const val PREF_COW_ILLUSTRATION_INDEX = "COW_ILLUSTRATION_INDEX"
const val PREF_AUTO_CAPTURE = "AUTO_CAPTURE_ENABLED"
const val PREF_DEBUG_ENABLE = "AUTO_DEBUG_ENABLED"
const val PREF_MASK_DISPLAY = "MASK_DISPLAY_ENABLED"
var IS_RELEASE_BUILD = true // Set to true for release
}
private val internalAlgorithms = listOf(ALGORITHM_HAMMING, ALGORITHM_EUCLIDEAN, ALGORITHM_JACCARD)
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_home)
StringProvider.initialize(this)
setupUI()
checkAndRequestPermissions()
}
private fun checkAndRequestPermissions() {
val permissions = mutableListOf<String>()
if (ContextCompat.checkSelfPermission(this, android.Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
permissions.add(android.Manifest.permission.CAMERA)
}
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.R) {
// Android 11+: Request All Files Access for managing external storage
if (!android.os.Environment.isExternalStorageManager()) {
try {
val intent = Intent(android.provider.Settings.ACTION_MANAGE_APP_ALL_FILES_ACCESS_PERMISSION)
val uri = android.net.Uri.fromParts("package", packageName, null)
intent.data = uri
startActivity(intent)
} catch (_: Exception) {
val intent = Intent(android.provider.Settings.ACTION_MANAGE_ALL_FILES_ACCESS_PERMISSION)
startActivity(intent)
}
}
} else {
// Android 10 and below
if (ContextCompat.checkSelfPermission(this, android.Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
permissions.add(android.Manifest.permission.WRITE_EXTERNAL_STORAGE)
}
if (ContextCompat.checkSelfPermission(this, android.Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
permissions.add(android.Manifest.permission.READ_EXTERNAL_STORAGE)
}
}
if (permissions.isNotEmpty()) {
ActivityCompat.requestPermissions(this, permissions.toTypedArray(), PERMISSION_REQUEST_CODE)
}
}
private fun setupUI() {
val prefs = getSharedPreferences("AnimalRatingPrefs", MODE_PRIVATE)
// Menu Button Logic
val drawerLayout = findViewById<DrawerLayout>(R.id.drawer_layout)
val btnMenu = findViewById<ImageButton>(R.id.btnMenu)
btnMenu?.setOnClickListener {
drawerLayout.openDrawer(GravityCompat.START)
}
// Language Spinner
val languageSpinner = findViewById<Spinner>(R.id.spinnerLanguage)
val languages = StringProvider.getLanguages()
val languageAdapter = ArrayAdapter(this, android.R.layout.simple_spinner_item, languages)
languageAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
languageSpinner.adapter = languageAdapter
val savedLang = prefs.getString("LANGUAGE", "English")
languageSpinner.setSelection(languages.indexOf(savedLang))
languageSpinner.onItemSelectedListener = object : AdapterView.OnItemSelectedListener {
override fun onItemSelected(parent: AdapterView<*>?, view: View?, position: Int, id: Long) {
val selectedLanguage = languages[position]
val currentLang = prefs.getString("LANGUAGE", "English")
// Only update and recreate if language actually changed
if (selectedLanguage != currentLang) {
saveSettings() // Save UI state so it's not lost
StringProvider.setLanguage(selectedLanguage, this@HomeActivity)
// Post recreate to avoid WindowLeaked (allows spinner popup to close)
android.os.Handler(android.os.Looper.getMainLooper()).post {
recreate()
}
}
}
override fun onNothingSelected(parent: AdapterView<*>?) {}
}
// Set text from StringProvider
findViewById<TextView>(R.id.tvTitle).text = StringProvider.getString("app_name")
findViewById<TextView>(R.id.tvSubtitle).text = StringProvider.getString("subtitle_home")
findViewById<MaterialButton>(R.id.btnViewGallery).text = StringProvider.getString("btn_view_gallery")
findViewById<MaterialButton>(R.id.btnSelectCow).text = StringProvider.getString("btn_select_cow")
findViewById<TextView>(R.id.tvAlgorithmLabel).text = StringProvider.getString("label_algorithm")
findViewById<TextView>(R.id.tvThresholdLabel).text = StringProvider.getString("label_match_threshold")
// Cow Illustration and Logic
val ivCowIllustration = findViewById<ImageView>(R.id.ivCowIllustration)
val savedIndex = prefs.getInt(PREF_COW_ILLUSTRATION_INDEX, 0)
setCowIllustration(ivCowIllustration, savedIndex)
ivCowIllustration.setOnClickListener { view ->
showIllustrationPopup(view, ivCowIllustration)
}
// Navigation buttons
findViewById<MaterialButton>(R.id.btnViewGallery).setOnClickListener {
startActivity(Intent(this, GalleryActivity::class.java))
}
findViewById<MaterialButton>(R.id.btnSelectCow).setOnClickListener {
// In debug builds, persist the settings panel state before leaving this screen.
if (!IS_RELEASE_BUILD) {
saveSettings()
}
startActivity(Intent(this, CowSelectionActivity::class.java))
}
// Auto Capture Toggle
val switchAutoCapture = findViewById<SwitchMaterial>(R.id.switchAutoCapture)
switchAutoCapture.text = StringProvider.getString("label_auto_capture")
val autoCaptureEnabled = prefs.getBoolean(PREF_AUTO_CAPTURE, false)
switchAutoCapture.isChecked = autoCaptureEnabled
val states = arrayOf(
intArrayOf(android.R.attr.state_checked),
intArrayOf(-android.R.attr.state_checked)
)
val thumbColors = intArrayOf(
Color.parseColor("#6D4C41"), // Dark Brown for On
Color.GRAY // Grey for Off
)
val trackColors = intArrayOf(
Color.parseColor("#8D6E63"), // Lighter Brown for On track
Color.LTGRAY // Light Grey for Off track
)
switchAutoCapture.thumbTintList = ColorStateList(states, thumbColors)
switchAutoCapture.trackTintList = ColorStateList(states, trackColors)
switchAutoCapture.setOnCheckedChangeListener { _, isChecked ->
prefs.edit().putBoolean(PREF_AUTO_CAPTURE, isChecked).apply()
}
val switchEnableDebug = findViewById<SwitchMaterial>(R.id.switchEnableDebug)
switchEnableDebug.text = StringProvider.getString("label_enable_debug")
val debugEnabled = prefs.getBoolean(PREF_DEBUG_ENABLE, false)
switchEnableDebug.isChecked = debugEnabled
IS_RELEASE_BUILD = !debugEnabled
switchEnableDebug.thumbTintList = ColorStateList(states, thumbColors)
switchEnableDebug.trackTintList = ColorStateList(states, trackColors)
switchEnableDebug.setOnCheckedChangeListener { _, isChecked ->
prefs.edit().putBoolean(PREF_DEBUG_ENABLE, isChecked).apply()
IS_RELEASE_BUILD = !isChecked
if (IS_RELEASE_BUILD) {
findViewById<CardView>(R.id.viewSettings).visibility = View.GONE
} else {
saveSettings()
findViewById<CardView>(R.id.viewSettings).visibility = View.VISIBLE
}
}
val headerSettings = findViewById<TextView>(R.id.tvHeaderSettings)
if (headerSettings != null) headerSettings.text = StringProvider.getString("header_settings")
// Algorithm Spinner
val spinner = findViewById<Spinner>(R.id.spinnerAlgorithm)
val displayAlgorithms = listOf(
StringProvider.getString("algo_hamming"),
StringProvider.getString("algo_euclidean"),
StringProvider.getString("algo_jaccard")
)
val adapter = ArrayAdapter(this, android.R.layout.simple_spinner_item, displayAlgorithms)
adapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item)
spinner.adapter = adapter
// Set default selection from preferences or intent
val savedAlg = prefs.getString("ALGORITHM", ALGORITHM_HAMMING)
val index = internalAlgorithms.indexOf(savedAlg)
spinner.setSelection(if (index >= 0) index else 0)
// Threshold SeekBar
val seekBar = findViewById<SeekBar>(R.id.seekBarThreshold)
val tvThreshold = findViewById<TextView>(R.id.tvThresholdValue)
val savedThreshold = prefs.getInt("THRESHOLD", 75)
seekBar.progress = savedThreshold
tvThreshold.text = "$savedThreshold%"
seekBar.setOnSeekBarChangeListener(object : SeekBar.OnSeekBarChangeListener {
override fun onProgressChanged(seekBar: SeekBar?, progress: Int, fromUser: Boolean) {
tvThreshold.text = "$progress%"
}
override fun onStartTrackingTouch(seekBar: SeekBar?) {}
override fun onStopTrackingTouch(seekBar: SeekBar?) {}
})
// Mask Display Toggle
val switchMaskDisplay = findViewById<SwitchMaterial>(R.id.switchMaskDisplay)
switchMaskDisplay.text = StringProvider.getString("label_mask_display")
val maskDisplayEnabled = prefs.getBoolean(PREF_MASK_DISPLAY, false)
switchMaskDisplay.isChecked = maskDisplayEnabled
switchMaskDisplay.thumbTintList = ColorStateList(states, thumbColors)
switchMaskDisplay.trackTintList = ColorStateList(states, trackColors)
switchMaskDisplay.setOnCheckedChangeListener { _, isChecked ->
prefs.edit().putBoolean(PREF_MASK_DISPLAY, isChecked).apply()
}
// Initial visibility based on IS_RELEASE_BUILD
if (IS_RELEASE_BUILD) {
val settingsView = findViewById<CardView>(R.id.viewSettings)
settingsView.visibility = View.GONE
// Values are already restored from preferences above; nothing else to do in release mode.
// To force defaults on every release run instead, uncomment:
/*
prefs.edit().putString("ALGORITHM", ALGORITHM_JACCARD).apply()
prefs.edit().putInt("THRESHOLD", 88).apply()
prefs.edit().putBoolean(PREF_MASK_DISPLAY, false).apply()
*/
} else {
findViewById<CardView>(R.id.viewSettings).visibility = View.VISIBLE
}
}
private fun showIllustrationPopup(anchor: View, imageView: ImageView) {
val popup = PopupMenu(this, anchor)
for (i in 0..4) {
popup.menu.add(0, i, i, "Illustration $i")
}
popup.setOnMenuItemClickListener { item ->
val index = item.itemId
setCowIllustration(imageView, index)
// Save preference
getSharedPreferences("AnimalRatingPrefs", MODE_PRIVATE).edit()
.putInt(PREF_COW_ILLUSTRATION_INDEX, index)
.apply()
true
}
popup.show()
}
private fun setCowIllustration(imageView: ImageView, index: Int) {
val resName = "cow_illustration_$index"
val resId = resources.getIdentifier(resName, "drawable", packageName)
if (resId != 0) {
imageView.setImageResource(resId)
} else {
if (index == 0) {
val defaultId = resources.getIdentifier("cow_illustration", "drawable", packageName)
if (defaultId != 0) imageView.setImageResource(defaultId)
}
}
}
private fun saveSettings() {
val spinner = findViewById<Spinner>(R.id.spinnerAlgorithm)
val seekBar = findViewById<SeekBar>(R.id.seekBarThreshold)
val showSegmentationMask = findViewById<SwitchMaterial>(R.id.switchMaskDisplay)
if (spinner != null && seekBar != null && showSegmentationMask != null) {
val selectedIndex = spinner.selectedItemPosition
val selectedAlgorithm = if (selectedIndex >= 0 && selectedIndex < internalAlgorithms.size) {
internalAlgorithms[selectedIndex]
} else {
ALGORITHM_HAMMING
}
val threshold = seekBar.progress
// Save to preferences
val prefs = getSharedPreferences("AnimalRatingPrefs", MODE_PRIVATE)
val showMask = prefs.getBoolean(PREF_MASK_DISPLAY, false)
prefs.edit().apply {
putString("ALGORITHM", selectedAlgorithm)
putInt("THRESHOLD", threshold)
putBoolean(PREF_MASK_DISPLAY, showMask)
apply()
}
}
}
private fun saveSettingsAndStart() {
if(!IS_RELEASE_BUILD)
saveSettings()
startActivity(Intent(this, CowSelectionActivity::class.java))
}
}
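The settings persisted here all live in the "AnimalRatingPrefs" shared preferences. A minimal sketch of how a consumer could read them back, assuming a Context in scope (key names and defaults taken from the code above):
val prefs = context.getSharedPreferences("AnimalRatingPrefs", Context.MODE_PRIVATE)
val algorithm = prefs.getString("ALGORITHM", HomeActivity.ALGORITHM_HAMMING)
val threshold = prefs.getInt("THRESHOLD", 75)
val showMask = prefs.getBoolean(HomeActivity.PREF_MASK_DISPLAY, false)
val autoCapture = prefs.getBoolean(HomeActivity.PREF_AUTO_CAPTURE, false)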

View File

@ -1,328 +0,0 @@
package com.example.animalrating
import android.content.Intent
import android.graphics.BitmapFactory
import android.graphics.Color
import android.graphics.drawable.GradientDrawable
import android.os.Bundle
import android.view.Gravity
import android.widget.ImageView
import android.widget.LinearLayout
import android.widget.TextView
import android.widget.Toast
import androidx.appcompat.app.AppCompatActivity
import androidx.core.content.ContextCompat
import com.google.android.material.button.MaterialButton
import com.google.android.material.textfield.TextInputLayout
import java.io.File
import java.io.FileWriter
import java.util.Locale
class RatingActivity : AppCompatActivity() {
private lateinit var currentCowName: String
private lateinit var ratingsContainer: LinearLayout
// Feature list (internal keys)
private val features = listOf(
"Stature", "Chest width", "Body depth", "Angularity",
"Rump angle", "Rump width", "Rear legs set", "Rear legs rear view",
"Foot angle", "Fore udder attachment", "Rear udder height",
"Central ligament", "Udder depth", "Front teat position",
"Teat length", "Rear teat position", "Locomotion",
"Body condition score", "Hock development", "Bone structure",
"Rear udder width", "Teat thickness", "Muscularity"
)
private val ratingsMap = mutableMapOf<String, Int>()
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_rating)
StringProvider.initialize(this)
currentCowName = intent.getStringExtra("COW_NAME") ?: run {
finish()
return
}
val toolbar = findViewById<androidx.appcompat.widget.Toolbar>(R.id.toolbar)
setSupportActionBar(toolbar)
supportActionBar?.setDisplayShowTitleEnabled(false)
toolbar.setNavigationOnClickListener { finish() }
findViewById<TextView>(R.id.tvToolbarTitle)?.text = StringProvider.getString("title_rate_cow")
setupUIStrings()
loadCowDetails()
loadCowImages()
setupRatingSection()
loadExistingRatings()
findViewById<MaterialButton>(R.id.btnSaveRating).setOnClickListener {
saveRatings()
}
findViewById<MaterialButton>(R.id.btnCancelRating).setOnClickListener {
finish()
}
}
private fun setupUIStrings() {
findViewById<TextView>(R.id.tvHeaderPhotos)?.text = StringProvider.getString("header_photos")
findViewById<TextView>(R.id.tvHeaderCowDetails)?.text = StringProvider.getString("header_cow_details")
findViewById<TextView>(R.id.tvHeaderFeatureRatings)?.text = StringProvider.getString("header_feature_ratings")
findViewById<TextInputLayout>(R.id.tilComments)?.hint = StringProvider.getString("hint_comments")
findViewById<MaterialButton>(R.id.btnSaveRating)?.text = StringProvider.getString("btn_save_rating")
findViewById<MaterialButton>(R.id.btnCancelRating)?.text = StringProvider.getString("btn_cancel")
}
private fun loadCowDetails() {
val docsFolder = StorageUtils.getDocumentsFolder()
val csvFile = File(docsFolder, "cow_profiles.csv")
if (!csvFile.exists()) return
try {
val lines = csvFile.readLines()
val record = lines.find { it.startsWith("$currentCowName,") }?.split(",") ?: return
if (record.size >= 8) {
// Translate values for display
val storedSpecies = record[1]
val speciesKey = StringProvider.getKeyForEnglishValue(storedSpecies)
val displaySpecies = if (speciesKey != null) StringProvider.getString(speciesKey) else storedSpecies
val storedBreed = record[2]
val breedKey = StringProvider.getKeyForEnglishValue(storedBreed)
val displayBreed = if (breedKey != null) StringProvider.getString(breedKey) else storedBreed
val storedStatus = record[6]
val statusKey = StringProvider.getKeyForEnglishValue(storedStatus)
val displayStatus = if (statusKey != null) StringProvider.getString(statusKey) else storedStatus
val infoText = StringBuilder()
infoText.append("${StringProvider.getString("label_species")} $displaySpecies\n")
infoText.append("${StringProvider.getString("label_breed")} $displayBreed\n")
infoText.append("${StringProvider.getString("label_age")} ${record[3]} ${StringProvider.getString("unit_years")}\n")
infoText.append("${StringProvider.getString("label_milk_yield")} ${record[4]} ${StringProvider.getString("unit_liters")}\n")
infoText.append("${StringProvider.getString("label_calving_no")} ${record[5]}\n")
infoText.append("${StringProvider.getString("label_status")} $displayStatus\n")
infoText.append("${StringProvider.getString("hint_description")}: ${record[7]}")
findViewById<TextView>(R.id.tvCowDetails).text = infoText.toString()
}
} catch (e: Exception) {
e.printStackTrace()
}
}
private fun loadCowImages() {
val container = findViewById<LinearLayout>(R.id.ratingImagesContainer) ?: return
container.removeAllViews()
val cowImagesFolder = StorageUtils.getCowImageFolder(currentCowName)
val files = cowImagesFolder.listFiles { _, name -> name.startsWith("${currentCowName}_") && name.endsWith(".jpg") } ?: return
val horizontalScroll = android.widget.HorizontalScrollView(this)
horizontalScroll.layoutParams = LinearLayout.LayoutParams(
LinearLayout.LayoutParams.MATCH_PARENT,
LinearLayout.LayoutParams.WRAP_CONTENT
)
horizontalScroll.isFillViewport = false
val imagesLayout = LinearLayout(this)
imagesLayout.orientation = LinearLayout.HORIZONTAL
files.forEach { file ->
val imageView = ImageView(this)
val size = (100 * resources.displayMetrics.density).toInt()
val params = LinearLayout.LayoutParams(size, size)
params.setMargins(0, 0, 16, 0)
imageView.layoutParams = params
imageView.scaleType = ImageView.ScaleType.CENTER_CROP
val bitmap = BitmapFactory.decodeFile(file.absolutePath)
imageView.setImageBitmap(bitmap)
imageView.setOnClickListener {
val intent = Intent(this, FullScreenImageActivity::class.java)
intent.putExtra("IMAGE_PATH", file.absolutePath)
startActivity(intent)
}
imagesLayout.addView(imageView)
}
horizontalScroll.addView(imagesLayout)
container.addView(horizontalScroll)
}
private fun setupRatingSection() {
ratingsContainer = findViewById(R.id.ratingsContainer)
ratingsContainer.removeAllViews()
features.forEach { feature ->
val featureView = layoutInflater.inflate(R.layout.item_feature_rating, ratingsContainer, false)
// Localize feature name
val key = "feature_" + feature.lowercase(Locale.ROOT).replace(" ", "_")
val displayName = StringProvider.getString(key)
val finalName = if (displayName.isNotEmpty()) displayName else feature
featureView.findViewById<TextView>(R.id.tvFeatureName).text = finalName
val buttonContainer = featureView.findViewById<LinearLayout>(R.id.buttonContainer)
val segmentViews = mutableListOf<TextView>()
// Create 9 segments
for (i in 1..9) {
val tv = TextView(this)
val params = LinearLayout.LayoutParams(
0,
LinearLayout.LayoutParams.MATCH_PARENT,
1f
)
tv.layoutParams = params
tv.text = i.toString()
tv.gravity = Gravity.CENTER
tv.setTextColor(ContextCompat.getColor(this, R.color.black))
tv.textSize = 14f
tv.setBackgroundColor(Color.TRANSPARENT)
tv.setOnClickListener {
val currentRating = ratingsMap[feature] ?: 0
if (currentRating == i) {
// Clicked already selected -> Clear selection
ratingsMap[feature] = 0
updateSegmentSelection(segmentViews, 0)
} else {
// Select new rating
ratingsMap[feature] = i
updateSegmentSelection(segmentViews, i)
}
}
segmentViews.add(tv)
buttonContainer.addView(tv)
}
featureView.tag = segmentViews
ratingsContainer.addView(featureView)
}
}
private fun updateSegmentSelection(segments: List<TextView>, selectedRating: Int) {
segments.forEachIndexed { index, tv ->
val rating = index + 1
if (rating == selectedRating) {
tv.setTextColor(Color.WHITE)
val radius = (8 * resources.displayMetrics.density)
val drawable = GradientDrawable()
drawable.setColor(Color.parseColor("#6D4C41"))
if (rating == 1) {
drawable.cornerRadii = floatArrayOf(radius, radius, 0f, 0f, 0f, 0f, radius, radius)
} else if (rating == 9) {
drawable.cornerRadii = floatArrayOf(0f, 0f, radius, radius, radius, radius, 0f, 0f)
} else {
drawable.cornerRadius = 0f
}
tv.background = drawable
} else {
tv.setTextColor(Color.parseColor("#5D4037"))
tv.background = null
}
}
}
private fun loadExistingRatings() {
val docsFolder = StorageUtils.getDocumentsFolder()
val ratingsFile = File(docsFolder, "cow_ratings.csv")
if (!ratingsFile.exists()) return
try {
val lines = ratingsFile.readLines()
// Format: CowID,Comments,Feature1,Feature2,...
val record = lines.find { it.startsWith("$currentCowName,") }?.split(",") ?: return
if (record.size >= 2) {
// Index 0: ID
// Index 1: Comments
val comments = record[1].replace(";", ",")
findViewById<TextInputLayout>(R.id.tilComments).editText?.setText(comments)
// Ratings start from index 2
features.forEachIndexed { index, feature ->
val ratingStr = record.getOrNull(index + 2)
val rating = ratingStr?.toIntOrNull() ?: 0
if (rating > 0) {
ratingsMap[feature] = rating
// Find view and update by index
val featureView = ratingsContainer.getChildAt(index)
if (featureView != null) {
@Suppress("UNCHECKED_CAST")
val segments = featureView.tag as? List<TextView>
segments?.let { updateSegmentSelection(it, rating) }
}
}
}
}
} catch (e: Exception) {
e.printStackTrace()
}
}
private fun saveRatings() {
val commentsInput = findViewById<TextInputLayout>(R.id.tilComments).editText?.text.toString()
val comments = commentsInput.replace(",", ";")
val docsFolder = StorageUtils.getDocumentsFolder()
val ratingsFile = File(docsFolder, "cow_ratings.csv")
// Dynamically build header
val headerBuilder = StringBuilder("CowID,Comments")
features.forEach { headerBuilder.append(",$it") }
val header = headerBuilder.toString()
// Build row
val rowBuilder = StringBuilder()
rowBuilder.append("$currentCowName,$comments")
features.forEach { feature ->
val rating = ratingsMap[feature] ?: 0 // Defaults to 0 if not selected
rowBuilder.append(",$rating")
}
val newRow = rowBuilder.toString()
try {
val lines = if (ratingsFile.exists()) ratingsFile.readLines().toMutableList() else mutableListOf()
if (lines.isEmpty()) {
lines.add(header)
} else if (lines[0] != header) {
// Header mismatch (e.g. the feature list changed): the existing header and rows are kept as-is.
}
val existingIndex = lines.indexOfFirst { it.startsWith("$currentCowName,") }
if (existingIndex != -1) {
lines[existingIndex] = newRow
} else {
lines.add(newRow)
}
FileWriter(ratingsFile).use { writer ->
lines.forEach { line ->
writer.write(line + "\n")
}
}
Toast.makeText(this, StringProvider.getString("toast_ratings_saved"), Toast.LENGTH_SHORT).show()
finish()
} catch (e: Exception) {
e.printStackTrace()
Toast.makeText(this, StringProvider.getString("toast_error_saving_ratings"), Toast.LENGTH_SHORT).show()
}
}
}
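saveRatings above produces one cow_ratings.csv row per cow, shaped CowID,Comments,<one column per entry in features>, with commas inside the comment escaped as semicolons. A short sketch of reading a single score back out of such a row (values hypothetical):
// Hypothetical row: id, comments, then 23 feature scores in the order of the features list.
val record = "COW_001,calm; good posture,5,6,4,7,5,6,5,4,6,5,7,6,5,4,6,5,7,6,5,4,6,5,7".split(",")
val comments = record[1].replace(";", ",")                  // undo the escaping applied on save
val statureScore = record.getOrNull(2)?.toIntOrNull() ?: 0  // features[0] ("Stature") lives in column 2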

View File

@ -1,46 +0,0 @@
package com.example.animalrating
import android.os.Environment
import java.io.File
object StorageUtils {
private const val ROOT_FOLDER_NAME = "com.AnimalRating"
private fun getBaseFolder(): File {
// Changed to Android/media/com.AnimalRating as requested
val folder = File(Environment.getExternalStorageDirectory(), "Android/media/$ROOT_FOLDER_NAME")
if (!folder.exists()) {
folder.mkdirs()
}
return folder
}
fun getDocumentsFolder(): File {
val folder = File(getBaseFolder(), "Documents")
if (!folder.exists()) {
folder.mkdirs()
}
return folder
}
fun getImagesBaseFolder(): File {
val folder = File(getBaseFolder(), "Images")
if (!folder.exists()) {
folder.mkdirs()
}
return folder
}
fun getCowImageFolder(cowId: String): File {
return File(getImagesBaseFolder(), cowId)
}
fun getVideosFolder(): File {
val folder = File(getBaseFolder(), "Videos")
if (!folder.exists()) {
folder.mkdirs()
}
return folder
}
}
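For orientation, the helpers above resolve to fixed locations under shared external storage; a quick sketch of the paths a caller ends up with (cow id hypothetical):
// e.g. /storage/emulated/0/Android/media/com.AnimalRating/Documents/cow_profiles.csv
val profilesCsv = File(StorageUtils.getDocumentsFolder(), "cow_profiles.csv")
// e.g. /storage/emulated/0/Android/media/com.AnimalRating/Images/COW_001
val imageFolder = StorageUtils.getCowImageFolder("COW_001")
// Note: unlike the other helpers, getCowImageFolder does not create the directory itself.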

View File

@ -1,101 +0,0 @@
package com.example.animalrating
import android.content.Context
import org.json.JSONObject
import java.io.InputStream
object StringProvider {
private var stringData: JSONObject? = null
private var currentLanguage = "English"
private const val DEFAULT_LANGUAGE = "English"
fun initialize(context: Context) {
if (stringData == null) {
try {
val inputStream: InputStream = context.resources.openRawResource(R.raw.strings)
val jsonString = inputStream.bufferedReader().use { it.readText() }
stringData = JSONObject(jsonString)
// Load saved language
val prefs = context.getSharedPreferences("AnimalRatingPrefs", Context.MODE_PRIVATE)
currentLanguage = prefs.getString("LANGUAGE", DEFAULT_LANGUAGE) ?: DEFAULT_LANGUAGE
} catch (e: Exception) {
e.printStackTrace()
// Handle error, maybe load default strings or log
}
}
}
fun setLanguage(language: String, context: Context) {
currentLanguage = language
// Save selected language
val prefs = context.getSharedPreferences("AnimalRatingPrefs", Context.MODE_PRIVATE)
prefs.edit().putString("LANGUAGE", language).apply()
}
fun getLanguages(): List<String> {
return stringData?.keys()?.asSequence()?.toList() ?: listOf(DEFAULT_LANGUAGE)
}
fun getString(key: String): String {
return getStringForLanguage(key, currentLanguage)
}
fun getStringEnglish(key: String): String {
return getStringForLanguage(key, DEFAULT_LANGUAGE)
}
private fun getStringForLanguage(key: String, language: String): String {
return try {
stringData?.getJSONObject(language)?.getString(key) ?: ""
} catch (e: Exception) {
// Fallback to English if key not found in current language
try {
if (language != DEFAULT_LANGUAGE) {
stringData?.getJSONObject(DEFAULT_LANGUAGE)?.getString(key) ?: ""
} else {
""
}
} catch (e2: Exception) {
"" // Return empty if not found anywhere
}
}
}
fun getKeyForValue(value: String): String? {
// Helper to find the key for a given localized value in current language
// This is expensive, but useful if we need to reverse lookup
val langData = stringData?.optJSONObject(currentLanguage) ?: return null
val keys = langData.keys()
while (keys.hasNext()) {
val key = keys.next()
if (langData.getString(key) == value) {
return key
}
}
// Also check English just in case it was stored as English
val englishData = stringData?.optJSONObject(DEFAULT_LANGUAGE) ?: return null
val engKeys = englishData.keys()
while (engKeys.hasNext()) {
val key = engKeys.next()
if (englishData.getString(key) == value) {
return key
}
}
return null
}
fun getKeyForEnglishValue(value: String): String? {
val englishData = stringData?.optJSONObject(DEFAULT_LANGUAGE) ?: return null
val engKeys = englishData.keys()
while (engKeys.hasNext()) {
val key = engKeys.next()
if (englishData.getString(key) == value) {
return key
}
}
return null
}
}
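StringProvider expects the R.raw.strings resource to be a single JSON object keyed by language name, each language mapping string keys to translations; getLanguages() returns the top-level keys and lookups fall back to English. A minimal sketch of that shape (the second language name and translations are illustrative only):
val exampleJson = """
{
  "English": { "app_name": "Animal Rating", "btn_edit": "Edit" },
  "OtherLanguage": { "app_name": "...", "btn_edit": "..." }
}
""".trimIndent()
val data = JSONObject(exampleJson)
val languages = data.keys().asSequence().toList()                   // ["English", "OtherLanguage"]
val editLabel = data.getJSONObject("English").getString("btn_edit") // "Edit"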

View File

@ -1,18 +0,0 @@
package com.example.animalrating.ml
import android.graphics.Bitmap
import androidx.camera.core.ImageAnalysis
import androidx.camera.core.ImageProxy
class CowAnalyzer(
private val listener: CowListener
) : ImageAnalysis.Analyzer {
interface CowListener {
fun onFrame(imageProxy: ImageProxy)
}
override fun analyze(image: ImageProxy) {
listener.onFrame(image)
}
}
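CowAnalyzer just forwards frames, so the interesting wiring is where the ImageAnalysis use case is built. A minimal CameraX sketch, assuming a cameraExecutor already exists in scope; note the listener must close each ImageProxy or the analyzer stops receiving frames:
val analysis = ImageAnalysis.Builder()
    .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
    .build()
analysis.setAnalyzer(cameraExecutor, CowAnalyzer(object : CowAnalyzer.CowListener {
    override fun onFrame(imageProxy: ImageProxy) {
        try {
            // run detection / conversion on the frame here
        } finally {
            imageProxy.close()
        }
    }
}))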

View File

@ -1,43 +0,0 @@
package com.example.animalrating.ml
import android.graphics.*
import androidx.camera.core.ImageProxy
import java.io.ByteArrayOutputStream
/**
 * Convert an ImageProxy in YUV_420_888 format to a Bitmap via an NV21/JPEG round trip.
 * Note: this assumes tightly packed planes (no row or pixel stride padding), which is not guaranteed on every device.
 */
fun ImageProxy.toBitmap(): Bitmap {
val yBuffer = planes[0].buffer // Y
val uBuffer = planes[1].buffer // U
val vBuffer = planes[2].buffer // V
val ySize = yBuffer.remaining()
val uSize = uBuffer.remaining()
val vSize = vBuffer.remaining()
val nv21 = ByteArray(ySize + uSize + vSize)
// NV21 stores chroma as interleaved V then U, so copy the V plane before the U plane
yBuffer.get(nv21, 0, ySize)
vBuffer.get(nv21, ySize, vSize)
uBuffer.get(nv21, ySize + vSize, uSize)
val yuvImage = YuvImage(
nv21,
ImageFormat.NV21,
width,
height,
null
)
val out = ByteArrayOutputStream()
yuvImage.compressToJpeg(
Rect(0, 0, width, height),
90,
out
)
val jpegBytes = out.toByteArray()
return BitmapFactory.decodeByteArray(jpegBytes, 0, jpegBytes.size)
}

View File

@ -1,24 +0,0 @@
package com.example.animalrating.ui
import android.content.Context
import android.graphics.*
import android.util.AttributeSet
import android.view.View
class MaskOverlay(context: Context, attrs: AttributeSet?) : View(context, attrs) {
private var maskBitmap: Bitmap? = null
private var matrix: Matrix = Matrix()
fun updateMask(bitmap: Bitmap?, transform: Matrix?) {
maskBitmap = bitmap
matrix = transform ?: Matrix()
invalidate()
}
override fun onDraw(canvas: Canvas) {
super.onDraw(canvas)
val bmp = maskBitmap ?: return
canvas.drawBitmap(bmp, matrix, null)
}
}

View File

@ -1,61 +0,0 @@
package com.example.animalrating.ui
import android.content.Context
import android.graphics.*
import android.util.AttributeSet
import android.util.Log
import android.view.View
class SilhouetteOverlay(context: Context, attrs: AttributeSet?) : View(context, attrs) {
private val paint = Paint().apply {
color = Color.GREEN
style = Paint.Style.STROKE
strokeWidth = 5f
}
private val silhouettePaint = Paint().apply {
alpha = 128 // 50% opacity
}
private var silhouette: Bitmap? = null
fun setSilhouette(drawableId: Int) {
try {
if (drawableId != 0) {
silhouette = BitmapFactory.decodeResource(resources, drawableId)
} else {
silhouette = null
}
invalidate()
} catch (e: Exception) {
Log.e("SilhouetteOverlay", "Error loading silhouette", e)
}
}
override fun onDraw(canvas: Canvas) {
super.onDraw(canvas)
silhouette?.let { bmp ->
val viewW = width.toFloat()
val viewH = height.toFloat()
val bmpW = bmp.width.toFloat()
val bmpH = bmp.height.toFloat()
// Calculate scale to fit (FIT_CENTER)
val scale = kotlin.math.min(viewW / bmpW, viewH / bmpH)
val scaledW = bmpW * scale
val scaledH = bmpH * scale
val left = (viewW - scaledW) / 2f
val top = (viewH - scaledH) / 2f
val destRect = RectF(left, top, left + scaledW, top + scaledH)
val srcRect = Rect(0, 0, bmp.width, bmp.height)
canvas.drawBitmap(bmp, srcRect, destRect, silhouettePaint)
}
}
}

View File

@ -1,11 +0,0 @@
package com.example.animalrating.ui.theme
import androidx.compose.ui.graphics.Color
val Purple80 = Color(0xFFD0BCFF)
val PurpleGrey80 = Color(0xFFCCC2DC)
val Pink80 = Color(0xFFEFB8C8)
val Purple40 = Color(0xFF6650a4)
val PurpleGrey40 = Color(0xFF625b71)
val Pink40 = Color(0xFF7D5260)

View File

@ -1,58 +0,0 @@
package com.example.animalrating.ui.theme
import android.app.Activity
import android.os.Build
import androidx.compose.foundation.isSystemInDarkTheme
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.darkColorScheme
import androidx.compose.material3.dynamicDarkColorScheme
import androidx.compose.material3.dynamicLightColorScheme
import androidx.compose.material3.lightColorScheme
import androidx.compose.runtime.Composable
import androidx.compose.ui.platform.LocalContext
private val DarkColorScheme = darkColorScheme(
primary = Purple80,
secondary = PurpleGrey80,
tertiary = Pink80
)
private val LightColorScheme = lightColorScheme(
primary = Purple40,
secondary = PurpleGrey40,
tertiary = Pink40
/* Other default colors to override
background = Color(0xFFFFFBFE),
surface = Color(0xFFFFFBFE),
onPrimary = Color.White,
onSecondary = Color.White,
onTertiary = Color.White,
onBackground = Color(0xFF1C1B1F),
onSurface = Color(0xFF1C1B1F),
*/
)
@Composable
fun AnimalRatingTheme(
darkTheme: Boolean = isSystemInDarkTheme(),
// Dynamic color is available on Android 12+
dynamicColor: Boolean = true,
content: @Composable () -> Unit
) {
val colorScheme = when {
dynamicColor && Build.VERSION.SDK_INT >= Build.VERSION_CODES.S -> {
val context = LocalContext.current
if (darkTheme) dynamicDarkColorScheme(context) else dynamicLightColorScheme(context)
}
darkTheme -> DarkColorScheme
else -> LightColorScheme
}
MaterialTheme(
colorScheme = colorScheme,
typography = Typography,
content = content
)
}

View File

@ -1,34 +0,0 @@
package com.example.animalrating.ui.theme
import androidx.compose.material3.Typography
import androidx.compose.ui.text.TextStyle
import androidx.compose.ui.text.font.FontFamily
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.unit.sp
// Set of Material typography styles to start with
val Typography = Typography(
bodyLarge = TextStyle(
fontFamily = FontFamily.Default,
fontWeight = FontWeight.Normal,
fontSize = 16.sp,
lineHeight = 24.sp,
letterSpacing = 0.5.sp
)
/* Other default text styles to override
titleLarge = TextStyle(
fontFamily = FontFamily.Default,
fontWeight = FontWeight.Normal,
fontSize = 22.sp,
lineHeight = 28.sp,
letterSpacing = 0.sp
),
labelSmall = TextStyle(
fontFamily = FontFamily.Default,
fontWeight = FontWeight.Medium,
fontSize = 11.sp,
lineHeight = 16.sp,
letterSpacing = 0.5.sp
)
*/
)

View File

@ -0,0 +1,19 @@
package com.example.livingai
import android.app.Application
import com.example.livingai.di.appModule
import org.koin.android.ext.koin.androidContext
import org.koin.android.ext.koin.androidLogger
import org.koin.core.context.startKoin
class LivingAIApplication: Application() {
override fun onCreate() {
super.onCreate()
startKoin {
androidLogger()
androidContext(this@LivingAIApplication)
modules(appModule)
}
}
}

View File

@ -0,0 +1,73 @@
package com.example.livingai
import android.os.Bundle
import androidx.activity.ComponentActivity
import androidx.activity.SystemBarStyle
import androidx.activity.compose.LocalActivityResultRegistryOwner
import androidx.activity.compose.setContent
import androidx.activity.enableEdgeToEdge
import androidx.compose.foundation.background
import androidx.compose.foundation.layout.Box
import androidx.compose.material3.MaterialTheme
import androidx.compose.runtime.CompositionLocalProvider
import androidx.compose.runtime.collectAsState
import androidx.compose.runtime.getValue
import androidx.compose.ui.Modifier
import androidx.compose.ui.graphics.Color
import androidx.compose.ui.graphics.toArgb
import androidx.compose.ui.platform.LocalContext
import androidx.core.splashscreen.SplashScreen.Companion.installSplashScreen
import androidx.core.view.WindowCompat
import com.example.livingai.domain.usecases.AppDataUseCases
import com.example.livingai.pages.home.HomeViewModel
import com.example.livingai.pages.navigation.NavGraph
import com.example.livingai.ui.theme.LivingAITheme
import com.example.livingai.utils.LocaleHelper
import org.koin.android.ext.android.inject
import org.koin.androidx.viewmodel.ext.android.viewModel
class MainActivity : ComponentActivity() {
private val viewModel by viewModel<HomeViewModel>()
private val appDataUseCases: AppDataUseCases by inject()
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
WindowCompat.setDecorFitsSystemWindows(window, false)
installSplashScreen().apply {
setKeepOnScreenCondition {
viewModel.splashCondition.value
}
}
setContent {
val settings by appDataUseCases.getSettings().collectAsState(initial = null)
val context = LocalContext.current
val localizedContext = settings?.let {
LocaleHelper.applyLocale(context, it.language)
} ?: context
CompositionLocalProvider(
LocalContext provides localizedContext,
LocalActivityResultRegistryOwner provides this
) {
LivingAITheme {
enableEdgeToEdge(
statusBarStyle = SystemBarStyle.auto(
lightScrim = Color.Transparent.toArgb(),
darkScrim = Color.Transparent.toArgb()
),
navigationBarStyle = SystemBarStyle.auto(
lightScrim = Color.Transparent.toArgb(),
darkScrim = Color.Transparent.toArgb()
)
)
Box(modifier = Modifier.background(color = MaterialTheme.colorScheme.background)) {
val startDestination = viewModel.startDestination.value
NavGraph(startDestination = startDestination)
}
}
}
}
}
}

View File

@ -0,0 +1,466 @@
package com.example.livingai.data.camera
import android.content.Context
import android.graphics.Bitmap
import android.graphics.RectF
import android.util.Log
import com.example.livingai.R
import com.example.livingai.domain.camera.*
import com.example.livingai.domain.model.camera.*
import com.example.livingai.utils.SignedMask
import com.example.livingai.utils.SilhouetteManager
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.segmentation.subject.SubjectSegmentation
import com.google.mlkit.vision.segmentation.subject.SubjectSegmenterOptions
import kotlinx.coroutines.suspendCancellableCoroutine
import org.tensorflow.lite.Interpreter
import org.tensorflow.lite.support.common.FileUtil
import java.io.IOException
import java.nio.ByteBuffer
import java.nio.ByteOrder
import java.util.concurrent.atomic.AtomicBoolean
import kotlin.coroutines.resume
import kotlin.math.abs
import kotlin.math.max
import kotlin.math.min
/* ============================================================= */
/* ORIENTATION CHECKER */
/* ============================================================= */
class DefaultOrientationChecker : OrientationChecker {
override suspend fun analyze(input: PipelineInput): Instruction {
val isPortraitRequired =
input.orientation.lowercase() == "front" ||
input.orientation.lowercase() == "back"
val isPortrait = input.deviceOrientation == 90 || input.deviceOrientation == 270
val isLandscape = input.deviceOrientation == 0 || input.deviceOrientation == 180
val valid = if (isPortraitRequired) isPortrait else isLandscape
return Instruction(
message = if (valid) "Orientation Correct"
else if (isPortraitRequired) "Turn to portrait mode"
else "Turn to landscape mode",
animationResId = if (valid) null else R.drawable.ic_launcher_foreground,
isValid = valid,
result = OrientationResult(
input.deviceOrientation,
if (isPortraitRequired) CameraOrientation.PORTRAIT else CameraOrientation.LANDSCAPE
)
)
}
}
/* ============================================================= */
/* TILT CHECKER */
/* ============================================================= */
class DefaultTiltChecker : TiltChecker {
override suspend fun analyze(input: PipelineInput): Instruction {
val tolerance = 20f
val (targetPitch, targetRoll) = Pair(-90f, 0f)
Log.d("TiltCheckerMessage", "targetPitch: ${input.devicePitch}, targetRoll: ${input.deviceRoll}, targetAz: ${input.deviceAzimuth}, tP: $targetPitch, tR: $targetRoll")
val pitchError = abs(input.devicePitch - targetPitch)
val rollError = abs(input.deviceRoll - targetRoll)
val isLevel = pitchError <= tolerance && rollError <= tolerance && input.deviceAzimuth > 0
val isRollError = rollError > tolerance
val isPitchError = pitchError > tolerance
val message = if (isLevel) {
"Device is level"
} else {
when {
input.deviceAzimuth < 0 -> "Tilt phone forward"
input.deviceAzimuth >= 0 && !isRollError && isPitchError -> "Tilt phone backward"
input.deviceRoll > 0 -> "Rotate phone left"
input.deviceRoll <= 0 -> "Rotate phone right"
else -> "Device is level"
}
}
return Instruction(
message = message,
isValid = isLevel,
result = TiltResult(
roll = input.deviceRoll,
pitch = input.devicePitch,
isLevel = isLevel
)
)
}
}
/* ============================================================= */
/* TFLITE OBJECT DETECTOR (PRIMARY + REFERENCE OBJECTS) */
/* ============================================================= */
class TFLiteObjectDetector(context: Context) : ObjectDetector {
private var interpreter: Interpreter? = null
private var labels: List<String> = emptyList()
private var inputW = 0
private var inputH = 0
private var maxDetections = 25
init {
try {
interpreter = Interpreter(
FileUtil.loadMappedFile(context, "efficientdet-lite0.tflite")
)
labels = FileUtil.loadLabels(context, "labels.txt")
val inputShape = interpreter!!.getInputTensor(0).shape()
inputW = inputShape[1]
inputH = inputShape[2]
maxDetections = interpreter!!.getOutputTensor(0).shape()[1]
} catch (e: IOException) {
Log.e("Detector", "Failed to load model", e)
interpreter = null
}
}
override suspend fun analyze(input: PipelineInput): Instruction {
val image = input.image
?: return Instruction("Waiting for camera", isValid = false)
val resized = Bitmap.createScaledBitmap(image, inputW, inputH, true)
val buffer = bitmapToBuffer(resized)
val locations = Array(1) { Array(maxDetections) { FloatArray(4) } }
val classes = Array(1) { FloatArray(maxDetections) }
val scores = Array(1) { FloatArray(maxDetections) }
val count = FloatArray(1)
interpreter?.runForMultipleInputsOutputs(
arrayOf(buffer),
mapOf(0 to locations, 1 to classes, 2 to scores, 3 to count)
)
val detections = mutableListOf<Detection>()
for (i in 0 until count[0].toInt()) {
if (scores[0][i] < 0.5f) continue
val label = labels.getOrElse(classes[0][i].toInt()) { "Unknown" }
val b = locations[0][i]
detections += Detection(
label,
scores[0][i],
RectF(
b[1] * image.width,
b[0] * image.height,
b[3] * image.width,
b[2] * image.height
)
)
}
val primary = detections
.filter { it.label.equals(input.targetAnimal, true) }
.maxByOrNull { it.confidence }
val refs = detections
.filter { it !== primary }
.mapIndexed { i, d ->
ReferenceObject(
id = "ref_$i",
label = d.label,
bounds = d.bounds,
relativeHeight = d.bounds.height() / image.height,
relativeWidth = d.bounds.width() / image.width,
distance = null
)
}
return Instruction(
message = if (primary != null) "Cow detected" else "Cow not detected",
isValid = primary != null,
result = DetectionResult(
isAnimalDetected = primary != null,
animalBounds = primary?.bounds,
referenceObjects = refs,
label = primary?.label,
confidence = primary?.confidence ?: 0f,
segmentationMask = null // segmentation runs later, in the pose-analysis step
)
)
}
private fun bitmapToBuffer(bitmap: Bitmap): ByteBuffer {
val buffer = ByteBuffer.allocateDirect(inputW * inputH * 3)
buffer.order(ByteOrder.nativeOrder())
val pixels = IntArray(inputW * inputH)
bitmap.getPixels(pixels, 0, inputW, 0, 0, inputW, inputH)
for (p in pixels) {
buffer.put(((p shr 16) and 0xFF).toByte())
buffer.put(((p shr 8) and 0xFF).toByte())
buffer.put((p and 0xFF).toByte())
}
return buffer
}
data class Detection(val label: String, val confidence: Float, val bounds: RectF)
}
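The box parsing above assumes the standard TFLite detection output layout, where each box is normalized [ymin, xmin, ymax, xmax]; that is why b[1] and b[3] are scaled by width while b[0] and b[2] are scaled by height. A tiny worked example (values hypothetical):
// Hypothetical normalized box from the model: [ymin, xmin, ymax, xmax]
val b = floatArrayOf(0.25f, 0.10f, 0.75f, 0.60f)
val bounds = RectF(
    b[1] * 640f,  // left   = 64
    b[0] * 480f,  // top    = 120
    b[3] * 640f,  // right  = 384
    b[2] * 480f   // bottom = 360
)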
class MockPoseAnalyzer : PoseAnalyzer {
private val segmenter by lazy {
SubjectSegmentation.getClient(
SubjectSegmenterOptions.Builder()
.enableForegroundConfidenceMask()
.build()
)
}
private val isSegmentationRunning = AtomicBoolean(false)
private var lastSegmentationValid: Boolean? = null
override suspend fun analyze(input: PipelineInput): Instruction {
val detection = input.previousDetectionResult
?: return invalidate("No detection")
val cowBoxImage = detection.animalBounds
?: return invalidate("Cow not detected")
val image = input.image
?: return invalidate("No image")
val silhouette = SilhouetteManager.getSilhouette(input.orientation)
?: return invalidate("Silhouette missing")
val cowBoxScreen = imageToScreenRect(
box = cowBoxImage,
imageWidth = image.width,
imageHeight = image.height,
screenWidth = input.screenWidthPx,
screenHeight = input.screenHeightPx
)
val silhouetteBoxScreen = silhouette.boundingBox
val align = checkAlignment(
detected = cowBoxScreen,
reference = silhouetteBoxScreen,
toleranceRatio = 0.15f
)
if (align.issue != AlignmentIssue.OK) {
lastSegmentationValid = false
return alignmentToInstruction(align)
}
// If segmentation already running → reuse last result
if (!isSegmentationRunning.compareAndSet(false, true)) {
return Instruction(
message = if (lastSegmentationValid == true)
"Pose Correct"
else
"Hold steady",
isValid = lastSegmentationValid == true,
result = detection
)
}
try {
val cropped = Bitmap.createBitmap(
image,
cowBoxImage.left.toInt(),
cowBoxImage.top.toInt(),
cowBoxImage.width().toInt(),
cowBoxImage.height().toInt()
)
val resized = Bitmap.createScaledBitmap(
cropped,
silhouette.croppedBitmap.width,
silhouette.croppedBitmap.height,
true
)
val mask = segment(resized)
val valid = if (mask != null) {
val score = similarity(mask, silhouette.signedMask)
score >= 0.40f
} else {
false
}
lastSegmentationValid = valid
return Instruction(
message = if (valid) "Pose Correct" else "Adjust Position",
isValid = valid,
result = detection.copy(segmentationMask = mask) // Pass the mask in the result
)
} finally {
isSegmentationRunning.set(false)
}
}
/* -------------------------------------------------- */
/* HELPERS */
/* -------------------------------------------------- */
private fun invalidate(reason: String): Instruction {
lastSegmentationValid = false
return Instruction(reason, isValid = false)
}
private suspend fun segment(bitmap: Bitmap): ByteArray? =
suspendCancellableCoroutine { cont ->
segmenter.process(InputImage.fromBitmap(bitmap, 0))
.addOnSuccessListener { r ->
val buf = r.foregroundConfidenceMask
?: return@addOnSuccessListener cont.resume(null)
buf.rewind()
val out = ByteArray(bitmap.width * bitmap.height)
for (i in out.indices) {
out[i] = if (buf.get() > 0.5f) 1 else 0
}
cont.resume(out)
}
.addOnFailureListener {
cont.resume(null)
}
}
private fun similarity(mask: ByteArray, ref: SignedMask): Float {
var s = 0f
var i = 0
for (row in ref.mask)
for (v in row)
s += mask[i++] * v
return if (ref.maxValue == 0f) 0f else s / ref.maxValue
}
}
/* ============================================================= */
/* ALIGNMENT HELPERS (UNCHANGED) */
/* ============================================================= */
enum class AlignmentIssue { TOO_SMALL, TOO_LARGE, MOVE_LEFT, MOVE_RIGHT, MOVE_UP, MOVE_DOWN, OK }
data class AlignmentResult(val issue: AlignmentIssue, val scale: Float, val dx: Float, val dy: Float)
fun checkAlignment(
detected: RectF,
reference: RectF,
toleranceRatio: Float
): AlignmentResult {
val tolX = reference.width() * toleranceRatio
val tolY = reference.height() * toleranceRatio
if (detected.left < reference.left - tolX)
return AlignmentResult(AlignmentIssue.MOVE_RIGHT, detected.width() / reference.width(), detected.left - reference.left, 0f)
if (detected.right > reference.right + tolX)
return AlignmentResult(AlignmentIssue.MOVE_LEFT, detected.width() / reference.width(), detected.right - reference.right, 0f)
if (detected.top < reference.top - tolY)
return AlignmentResult(AlignmentIssue.MOVE_DOWN, detected.height() / reference.height(), 0f, detected.top - reference.top)
if (detected.bottom > reference.bottom + tolY)
return AlignmentResult(AlignmentIssue.MOVE_UP, detected.height() / reference.height(), 0f, detected.bottom - reference.bottom)
val scale = min(
detected.width() / reference.width(),
detected.height() / reference.height()
)
if (scale < 1f - toleranceRatio)
return AlignmentResult(AlignmentIssue.TOO_SMALL, scale, 0f, 0f)
if (scale > 1f + toleranceRatio)
return AlignmentResult(AlignmentIssue.TOO_LARGE, scale, 0f, 0f)
return AlignmentResult(AlignmentIssue.OK, scale, 0f, 0f)
}
fun alignmentToInstruction(a: AlignmentResult) = when (a.issue) {
AlignmentIssue.TOO_SMALL -> Instruction("Move closer", false)
AlignmentIssue.TOO_LARGE -> Instruction("Move backward", false)
AlignmentIssue.MOVE_LEFT -> Instruction("Move right", false)
AlignmentIssue.MOVE_RIGHT -> Instruction("Move left", false)
AlignmentIssue.MOVE_UP -> Instruction("Move down", false)
AlignmentIssue.MOVE_DOWN -> Instruction("Move up", false)
AlignmentIssue.OK -> Instruction("Hold steady", true)
}
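As a worked example of checkAlignment (numbers hypothetical): with a 400x300 px reference box and a 15% tolerance, the horizontal slack is 60 px, so a detected box whose left edge sits more than 60 px left of the reference reports MOVE_RIGHT before any scale check runs:
val reference = RectF(100f, 100f, 500f, 400f)  // 400 x 300 -> tolX = 60, tolY = 45
val detected = RectF(20f, 120f, 420f, 420f)    // left edge is 80 px outside the reference
val result = checkAlignment(detected, reference, toleranceRatio = 0.15f)
// result.issue == AlignmentIssue.MOVE_RIGHT; alignmentToInstruction(result) yields the user-facing message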
private fun imageToScreenRect(
box: RectF,
imageWidth: Int,
imageHeight: Int,
screenWidth: Float,
screenHeight: Float
): RectF {
// Mirrors the image-to-screen mapping used by DetectionOverlay so boxes line up with the preview
val widthRatio = screenWidth / imageWidth
val heightRatio = screenHeight / imageHeight
val scale = max(widthRatio, heightRatio)
val offsetX = (screenWidth - imageWidth * scale) / 2f
val offsetY = (screenHeight - imageHeight * scale) / 2f
return RectF(
box.left * scale + offsetX,
box.top * scale + offsetY,
box.right * scale + offsetX,
box.bottom * scale + offsetY
)
}
/* ============================================================= */
/* CAPTURE + MEASUREMENT (UNCHANGED) */
/* ============================================================= */
class DefaultCaptureHandler : CaptureHandler {
override suspend fun capture(input: PipelineInput, detectionResult: DetectionResult): CaptureData =
CaptureData(
image = input.image!!,
segmentationMask = BooleanArray(0),
animalMetrics = ObjectMetrics(0f, 0f, 1f),
referenceObjects = detectionResult.referenceObjects
)
}
class DefaultMeasurementCalculator : MeasurementCalculator {
override fun calculateRealMetrics(
targetHeight: Float,
referenceObject: ReferenceObject,
currentMetrics: ObjectMetrics
): RealWorldMetrics {
if (referenceObject.relativeHeight == 0f)
return RealWorldMetrics(0f, 0f, 0f)
val scale = targetHeight / referenceObject.relativeHeight
return RealWorldMetrics(
height = currentMetrics.relativeHeight * scale,
width = currentMetrics.relativeWidth * scale,
distance = currentMetrics.distance
)
}
}
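calculateRealMetrics is a straight proportional scaling: the known real height of a reference object divided by its relative on-screen height gives a scale factor that is applied to the animal's relative metrics. A worked sketch with hypothetical values (ObjectMetrics parameter names assumed from the property accesses above):
// A reference object with a known real height of 1.0 m that fills 20% of the frame height.
val ref = ReferenceObject(
    id = "ref_0", label = "person", bounds = RectF(0f, 0f, 0f, 0f),
    relativeHeight = 0.20f, relativeWidth = 0.05f, distance = null
)
val animal = ObjectMetrics(relativeHeight = 0.55f, relativeWidth = 0.80f, distance = 1f)
val real = DefaultMeasurementCalculator().calculateRealMetrics(1.0f, ref, animal)
// scale = 1.0 / 0.20 = 5.0  ->  real.height = 2.75, real.width = 4.0 (same units as targetHeight)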

View File

@ -0,0 +1,48 @@
package com.example.livingai.data.local
import androidx.paging.PagingSource
import androidx.paging.PagingState
import com.example.livingai.domain.model.AnimalProfile
class AnimalDataPagingSource(
private val dataSource: CSVDataSource
) : PagingSource<Int, AnimalProfile>() {
override fun getRefreshKey(state: PagingState<Int, AnimalProfile>): Int? {
return state.anchorPosition?.let { anchorPosition ->
val anchorPage = state.closestPageToPosition(anchorPosition)
anchorPage?.prevKey?.plus(1) ?: anchorPage?.nextKey?.minus(1)
}
}
override suspend fun load(params: LoadParams<Int>): LoadResult<Int, AnimalProfile> {
val page = params.key ?: 0
return try {
// CSVDataSource currently reads the whole file at once (a simple stand-in for a database),
// so paginate the in-memory list here.
val allProfiles = dataSource.getAllAnimalProfiles()
val start = page * params.loadSize
val end = minOf(start + params.loadSize, allProfiles.size)
if (start >= allProfiles.size) {
return LoadResult.Page(
data = emptyList(),
prevKey = if (page > 0) page - 1 else null,
nextKey = null
)
}
val pagedData = allProfiles.subList(start, end)
LoadResult.Page(
data = pagedData,
prevKey = if (page > 0) page - 1 else null,
nextKey = if (end < allProfiles.size) page + 1 else null
)
} catch (e: Exception) {
LoadResult.Error(e)
}
}
}

View File

@ -0,0 +1,424 @@
package com.example.livingai.data.local
import android.content.ContentUris
import android.content.ContentValues
import android.content.Context
import android.net.Uri
import android.os.Build
import android.os.Environment
import android.provider.MediaStore
import androidx.paging.Pager
import androidx.paging.PagingConfig
import androidx.paging.PagingData
import com.example.livingai.domain.model.*
import com.example.livingai.domain.repository.business.DataSource
import com.opencsv.CSVReader
import com.opencsv.CSVWriter
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.flow.*
import kotlinx.coroutines.sync.Mutex
import kotlinx.coroutines.sync.withLock
import kotlinx.coroutines.withContext
import java.io.*
class CSVDataSource(
private val context: Context,
private val fileName: String,
private val dispatchers: com.example.livingai.utils.CoroutineDispatchers
) : DataSource {
private val folderName = "LivingAI"
private val mutex = Mutex()
private var cachedUri: Uri? = null
private suspend fun getCsvUri(): Uri = withContext(dispatchers.io) {
cachedUri?.let { return@withContext it }
val uri = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
queryOrCreateCsvQ()
} else {
legacyGetOrCreateFile()
}
cachedUri = uri
return@withContext uri
}
private fun queryOrCreateCsvQ(): Uri {
val collection = MediaStore.Files.getContentUri(MediaStore.VOLUME_EXTERNAL_PRIMARY)
val projection = arrayOf(
MediaStore.Files.FileColumns._ID,
MediaStore.Files.FileColumns.DISPLAY_NAME,
MediaStore.Files.FileColumns.RELATIVE_PATH
)
val cursor = context.contentResolver.query(
collection,
projection,
"${MediaStore.Files.FileColumns.DISPLAY_NAME}=?",
arrayOf(fileName),
null
)
cursor?.use {
val idCol = it.getColumnIndexOrThrow(MediaStore.Files.FileColumns._ID)
val pathCol = it.getColumnIndexOrThrow(MediaStore.Files.FileColumns.RELATIVE_PATH)
while (it.moveToNext()) {
val relPath = it.getString(pathCol) ?: ""
if (relPath.contains(folderName)) {
val id = it.getLong(idCol)
return ContentUris.withAppendedId(collection, id)
}
}
}
// Create file if not found
val values = ContentValues().apply {
put(MediaStore.Files.FileColumns.DISPLAY_NAME, fileName)
put(MediaStore.Files.FileColumns.MIME_TYPE, "text/csv")
put(
MediaStore.Files.FileColumns.RELATIVE_PATH,
"${Environment.DIRECTORY_DOCUMENTS}/$folderName/"
)
}
return context.contentResolver.insert(collection, values)!!.also { uri ->
writeHeader(uri)
}
}
private fun legacyGetOrCreateFile(): Uri {
val dir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_DOCUMENTS)
val sub = File(dir, folderName)
if (!sub.exists()) sub.mkdirs()
val file = File(sub, fileName)
if (!file.exists()) {
file.createNewFile()
writeHeaderLegacy(file)
}
return Uri.fromFile(file)
}
private suspend fun readAllLines(): List<Array<String>> = mutex.withLock {
val uri = getCsvUri()
return withContext(dispatchers.io) {
try {
context.contentResolver.openInputStream(uri)?.use { input ->
val reader = CSVReader(InputStreamReader(input))
val lines = reader.readAll()
reader.close()
if (lines.isNotEmpty() && lines[0].contentEquals(HEADER)) lines.drop(1)
else lines
} ?: emptyList()
} catch (e: Exception) {
e.printStackTrace()
emptyList()
}
}
}
private suspend fun writeAllLines(lines: List<Array<String>>) = mutex.withLock {
val uri = getCsvUri()
withContext(dispatchers.io) {
try {
context.contentResolver.openOutputStream(uri, "wt")?.use { out ->
val writer = CSVWriter(OutputStreamWriter(out))
writer.writeNext(HEADER)
writer.writeAll(lines)
writer.close()
}
} catch (e: Exception) {
e.printStackTrace()
}
}
}
private fun writeHeader(uri: Uri) {
context.contentResolver.openOutputStream(uri, "wt")?.use { out ->
CSVWriter(OutputStreamWriter(out)).use { it.writeNext(HEADER) }
}
}
private fun writeHeaderLegacy(file: File) {
CSVWriter(FileWriter(file)).use { it.writeNext(HEADER) }
}
// --------------------------------------------------------------------------------------------
// 3) PUBLIC API IMPLEMENTATION
// --------------------------------------------------------------------------------------------
override fun getAnimalProfiles(): Flow<PagingData<AnimalProfile>> =
Pager(PagingConfig(pageSize = 20)) {
AnimalDataPagingSource(this)
}.flow
suspend fun getAllAnimalProfiles(): List<AnimalProfile> {
return readAllLines().mapNotNull(::parseAnimalProfile)
}
override fun getAnimalDetails(animalId: String): Flow<AnimalDetails?> = flow {
emit(parseAnimalDetails(readAllLines().find { it.getOrNull(INDEX_ID) == animalId }))
}.flowOn(Dispatchers.IO)
override fun getAnimalRatings(animalId: String): Flow<AnimalRating?> = flow {
emit(parseAnimalRating(readAllLines().find { it.getOrNull(INDEX_ID) == animalId }))
}.flowOn(Dispatchers.IO)
override suspend fun setAnimalProfile(p: AnimalProfile) {
val lines = readAllLines().toMutableList()
val i = lines.indexOfFirst { it.getOrNull(INDEX_ID) == p.animalId }
if (i != -1) lines[i] = updateProfile(lines[i], p)
else lines.add(createProfile(p))
writeAllLines(lines)
}
override suspend fun setAnimalDetails(d: AnimalDetails) {
val lines = readAllLines().toMutableList()
val i = lines.indexOfFirst { it.getOrNull(INDEX_ID) == d.animalId }
if (i != -1) lines[i] = updateDetails(lines[i], d)
else lines.add(createDetails(d))
writeAllLines(lines)
}
override suspend fun setAnimalRatings(r: AnimalRating) {
val lines = readAllLines().toMutableList()
val i = lines.indexOfFirst { it.getOrNull(INDEX_ID) == r.animalId }
if (i != -1) lines[i] = updateRating(lines[i], r)
else lines.add(createRating(r))
writeAllLines(lines)
}
override suspend fun deleteAnimalProfile(animalId: String) {
val lines = readAllLines().toMutableList()
val i = lines.indexOfFirst { it.getOrNull(INDEX_ID) == animalId }
if (i != -1) {
lines.removeAt(i)
writeAllLines(lines)
}
}
// --------------------------------------------------------------------------------------------
// 4) PARSERS + SERIALIZERS (same behavior as the previous implementation)
// --------------------------------------------------------------------------------------------
private fun empty(): Array<String> = Array(TOTAL_COLUMNS) { "" }
private fun parseAnimalProfile(row: Array<String>?): AnimalProfile? {
row ?: return null
val id = row.getOrNull(INDEX_ID)?.takeIf { it.isNotBlank() } ?: return null
return AnimalProfile(
animalId = id,
name = row[INDEX_NAME],
species = row[INDEX_SPECIES],
breed = row[INDEX_BREED],
sex = row[INDEX_SEX],
weight = row[INDEX_WEIGHT].toIntOrNull() ?: 0,
age = row[INDEX_AGE].toIntOrNull() ?: 0,
imageUrls = row[INDEX_IMAGES].split(";").filter { it.isNotBlank() },
overallRating = row[INDEX_RATING_OVERALL].toIntOrNull()
)
}
private fun parseAnimalDetails(row: Array<String>?): AnimalDetails? {
row ?: return null
val id = row.getOrNull(INDEX_ID)?.takeIf { it.isNotBlank() } ?: return null
return AnimalDetails(
animalId = id,
name = row[INDEX_NAME],
species = row[INDEX_SPECIES],
breed = row[INDEX_BREED],
sex = row[INDEX_SEX],
weight = row[INDEX_WEIGHT].toIntOrNull() ?: 0,
age = row[INDEX_AGE].toIntOrNull() ?: 0,
milkYield = row[INDEX_MILK].toIntOrNull() ?: 0,
calvingNumber = row[INDEX_CALVING].toIntOrNull() ?: 0,
reproductiveStatus = row[INDEX_REPRO],
description = row[INDEX_DESC],
images = row[INDEX_IMAGES].split(';').asSequence().filter { it.isNotBlank() }
.map { pair ->
val (k, v) = pair.split('=', limit = 2)
k to v
}.toMap(),
video = row[INDEX_VIDEO],
segmentedImages = row.getOrNull(INDEX_SEGMENTED_IMAGES)?.split(';')?.asSequence()?.filter { it.isNotBlank() }
?.map { pair ->
val (k, v) = pair.split('=', limit = 2)
k to v
}?.toMap() ?: emptyMap()
)
}
private fun parseAnimalRating(row: Array<String>?): AnimalRating? {
row ?: return null
val id = row.getOrNull(INDEX_ID)?.takeIf { it.isNotBlank() } ?: return null
return AnimalRating(
animalId = id,
overallRating = row[INDEX_RATING_OVERALL].toIntOrNull() ?: 0,
healthRating = row[INDEX_RATING_HEALTH].toIntOrNull() ?: 0,
breedRating = row[INDEX_RATING_BREED].toIntOrNull() ?: 0,
stature = row[INDEX_RATING_STATURE].toIntOrNull() ?: 0,
chestWidth = row[INDEX_RATING_CHEST].toIntOrNull() ?: 0,
bodyDepth = row[INDEX_RATING_BODY_DEPTH].toIntOrNull() ?: 0,
angularity = row[INDEX_RATING_ANGULARITY].toIntOrNull() ?: 0,
rumpAngle = row[INDEX_RATING_RUMP_ANGLE].toIntOrNull() ?: 0,
rumpWidth = row[INDEX_RATING_RUMP_WIDTH].toIntOrNull() ?: 0,
rearLegSet = row[INDEX_RATING_REAR_LEG_SET].toIntOrNull() ?: 0,
rearLegRearView = row[INDEX_RATING_REAR_LEG_REAR].toIntOrNull() ?: 0,
footAngle = row[INDEX_RATING_FOOT_ANGLE].toIntOrNull() ?: 0,
foreUdderAttachment = row[INDEX_RATING_FORE_UDDER].toIntOrNull() ?: 0,
rearUdderHeight = row[INDEX_RATING_REAR_UDDER_HEIGHT].toIntOrNull() ?: 0,
centralLigament = row[INDEX_RATING_CENTRAL_LIG].toIntOrNull() ?: 0,
udderDepth = row[INDEX_RATING_UDDER_DEPTH].toIntOrNull() ?: 0,
frontTeatPosition = row[INDEX_RATING_FRONT_TEAT].toIntOrNull() ?: 0,
teatLength = row[INDEX_RATING_TEAT_LEN].toIntOrNull() ?: 0,
rearTeatPosition = row[INDEX_RATING_REAR_TEAT].toIntOrNull() ?: 0,
locomotion = row[INDEX_RATING_LOCOMOTION].toIntOrNull() ?: 0,
bodyConditionScore = row[INDEX_RATING_BCS].toIntOrNull() ?: 0,
hockDevelopment = row[INDEX_RATING_HOCK].toIntOrNull() ?: 0,
boneStructure = row[INDEX_RATING_BONE].toIntOrNull() ?: 0,
rearUdderWidth = row[INDEX_RATING_REAR_UDDER_WIDTH].toIntOrNull() ?: 0,
teatThickness = row[INDEX_RATING_TEAT_THICKNESS].toIntOrNull() ?: 0,
muscularity = row[INDEX_RATING_MUSCULARITY].toIntOrNull() ?: 0,
bodyConditionComments = row[INDEX_RATING_BODY_COND_COMMENTS]
)
}
private fun updateProfile(row: Array<String>, p: AnimalProfile): Array<String> {
row[INDEX_ID] = p.animalId
row[INDEX_NAME] = p.name
row[INDEX_SPECIES] = p.species
row[INDEX_BREED] = p.breed
row[INDEX_SEX] = p.sex
row[INDEX_WEIGHT] = p.weight.toString()
row[INDEX_AGE] = p.age.toString()
row[INDEX_IMAGES] = p.imageUrls.joinToString(";")
return row
}
private fun updateDetails(row: Array<String>, d: AnimalDetails): Array<String> {
row[INDEX_ID] = d.animalId
row[INDEX_NAME] = d.name
row[INDEX_SPECIES] = d.species
row[INDEX_BREED] = d.breed
row[INDEX_SEX] = d.sex
row[INDEX_WEIGHT] = d.weight.toString()
row[INDEX_AGE] = d.age.toString()
row[INDEX_MILK] = d.milkYield.toString()
row[INDEX_CALVING] = d.calvingNumber.toString()
row[INDEX_REPRO] = d.reproductiveStatus
row[INDEX_DESC] = d.description
row[INDEX_IMAGES] = d.images.entries.joinToString(";") { (k, v) -> "$k=$v" }
row[INDEX_VIDEO] = d.video
row[INDEX_SEGMENTED_IMAGES] = d.segmentedImages.entries.joinToString(";") { (k, v) -> "$k=$v" }
return row
}
private fun updateRating(row: Array<String>, r: AnimalRating): Array<String> {
row[INDEX_ID] = r.animalId
row[INDEX_RATING_OVERALL] = r.overallRating.toString()
row[INDEX_RATING_HEALTH] = r.healthRating.toString()
row[INDEX_RATING_BREED] = r.breedRating.toString()
row[INDEX_RATING_STATURE] = r.stature.toString()
row[INDEX_RATING_CHEST] = r.chestWidth.toString()
row[INDEX_RATING_BODY_DEPTH] = r.bodyDepth.toString()
row[INDEX_RATING_ANGULARITY] = r.angularity.toString()
row[INDEX_RATING_RUMP_ANGLE] = r.rumpAngle.toString()
row[INDEX_RATING_RUMP_WIDTH] = r.rumpWidth.toString()
row[INDEX_RATING_REAR_LEG_SET] = r.rearLegSet.toString()
row[INDEX_RATING_REAR_LEG_REAR] = r.rearLegRearView.toString()
row[INDEX_RATING_FOOT_ANGLE] = r.footAngle.toString()
row[INDEX_RATING_FORE_UDDER] = r.foreUdderAttachment.toString()
row[INDEX_RATING_REAR_UDDER_HEIGHT] = r.rearUdderHeight.toString()
row[INDEX_RATING_CENTRAL_LIG] = r.centralLigament.toString()
row[INDEX_RATING_UDDER_DEPTH] = r.udderDepth.toString()
row[INDEX_RATING_FRONT_TEAT] = r.frontTeatPosition.toString()
row[INDEX_RATING_TEAT_LEN] = r.teatLength.toString()
row[INDEX_RATING_REAR_TEAT] = r.rearTeatPosition.toString()
row[INDEX_RATING_LOCOMOTION] = r.locomotion.toString()
row[INDEX_RATING_BCS] = r.bodyConditionScore.toString()
row[INDEX_RATING_HOCK] = r.hockDevelopment.toString()
row[INDEX_RATING_BONE] = r.boneStructure.toString()
row[INDEX_RATING_REAR_UDDER_WIDTH] = r.rearUdderWidth.toString()
row[INDEX_RATING_TEAT_THICKNESS] = r.teatThickness.toString()
row[INDEX_RATING_MUSCULARITY] = r.muscularity.toString()
row[INDEX_RATING_BODY_COND_COMMENTS] = r.bodyConditionComments
return row
}
private fun createProfile(p: AnimalProfile) = updateProfile(empty(), p)
private fun createDetails(d: AnimalDetails) = updateDetails(empty(), d)
private fun createRating(r: AnimalRating) = updateRating(empty(), r)
companion object {
// Same columns as before
const val INDEX_ID = 0
const val INDEX_NAME = 1
const val INDEX_SPECIES = 2
const val INDEX_BREED = 3
const val INDEX_SEX = 4
const val INDEX_WEIGHT = 5
const val INDEX_AGE = 6
const val INDEX_MILK = 7
const val INDEX_CALVING = 8
const val INDEX_REPRO = 9
const val INDEX_DESC = 10
const val INDEX_IMAGES = 11
const val INDEX_VIDEO = 12
const val INDEX_RATING_OVERALL = 13
const val INDEX_RATING_HEALTH = 14
const val INDEX_RATING_BREED = 15
const val INDEX_RATING_STATURE = 16
const val INDEX_RATING_CHEST = 17
const val INDEX_RATING_BODY_DEPTH = 18
const val INDEX_RATING_ANGULARITY = 19
const val INDEX_RATING_RUMP_ANGLE = 20
const val INDEX_RATING_RUMP_WIDTH = 21
const val INDEX_RATING_REAR_LEG_SET = 22
const val INDEX_RATING_REAR_LEG_REAR = 23
const val INDEX_RATING_FOOT_ANGLE = 24
const val INDEX_RATING_FORE_UDDER = 25
const val INDEX_RATING_REAR_UDDER_HEIGHT = 26
const val INDEX_RATING_CENTRAL_LIG = 27
const val INDEX_RATING_UDDER_DEPTH = 28
const val INDEX_RATING_FRONT_TEAT = 29
const val INDEX_RATING_TEAT_LEN = 30
const val INDEX_RATING_REAR_TEAT = 31
const val INDEX_RATING_LOCOMOTION = 32
const val INDEX_RATING_BCS = 33
const val INDEX_RATING_HOCK = 34
const val INDEX_RATING_BONE = 35
const val INDEX_RATING_REAR_UDDER_WIDTH = 36
const val INDEX_RATING_TEAT_THICKNESS = 37
const val INDEX_RATING_MUSCULARITY = 38
const val INDEX_RATING_BODY_COND_COMMENTS = 39
const val INDEX_SEGMENTED_IMAGES = 40
const val TOTAL_COLUMNS = 41
val HEADER = arrayOf(
"ID", "Name", "Species", "Breed", "Sex", "Weight", "Age", "MilkYield",
"CalvingNum", "ReproStatus", "Description", "Images", "Video",
"OverallRating", "HealthRating", "BreedRating", "Stature", "ChestWidth",
"BodyDepth", "Angularity", "RumpAngle", "RumpWidth", "RearLegSet",
"RearLegRearView", "FootAngle", "ForeUdderAttachment", "RearUdderHeight",
"CentralLigament", "UdderDepth", "FrontTeatPosition", "TeatLength",
"RearTeatPosition", "Locomotion", "BodyConditionScore", "HockDevelopment",
"BoneStructure", "RearUdderWidth", "TeatThickness", "Muscularity",
"BodyConditionComments", "SegmentedImages"
)
}
}
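// Example (not part of the original diff): a minimal read/write round trip against the CSV
// store, assuming the AnimalProfile constructor parameters used in parseAnimalProfile above.
suspend fun exampleCsvRoundTrip(dataSource: CSVDataSource) {
    dataSource.setAnimalProfile(
        AnimalProfile(
            animalId = "A-001", name = "Bella", species = "Cow", breed = "Gir",
            sex = "F", weight = 420, age = 4, imageUrls = emptyList(), overallRating = null
        )
    )
    val stored = dataSource.getAllAnimalProfiles().find { it.animalId == "A-001" }
    println(stored)
}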

View File

@ -0,0 +1,6 @@
package com.example.livingai.data.local.model
data class SettingsData(
val language: String,
val isAutoCaptureOn: Boolean
)

View File

@ -0,0 +1,31 @@
package com.example.livingai.data.manager
import androidx.datastore.core.DataStore
import androidx.datastore.preferences.core.Preferences
import androidx.datastore.preferences.core.booleanPreferencesKey
import androidx.datastore.preferences.core.edit
import com.example.livingai.domain.manager.LocalUserManager
import com.example.livingai.utils.Constants
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.map
class LocalUserManagerImpl(
private val dataStore: DataStore<Preferences>
): LocalUserManager {
override suspend fun saveAppEntry() {
dataStore.edit { settings ->
settings[PreferencesKeys.APP_ENTRY] = true
}
}
override fun readAppEntry(): Flow<Boolean> {
return dataStore.data.map { preferences ->
preferences[PreferencesKeys.APP_ENTRY] ?: false
}
}
}
private object PreferencesKeys {
val APP_ENTRY = booleanPreferencesKey(name = Constants.APP_ENTRY)
}

View File

@ -0,0 +1,94 @@
package com.example.livingai.data.ml
import android.content.Context
import android.graphics.Bitmap
import android.graphics.Rect
import com.example.livingai.domain.ml.AIModel
import org.tensorflow.lite.Interpreter
import org.tensorflow.lite.support.common.FileUtil
import org.tensorflow.lite.support.image.ImageProcessor
import org.tensorflow.lite.support.image.TensorImage
import org.tensorflow.lite.support.image.ops.ResizeOp
import java.nio.ByteBuffer
import java.nio.ByteOrder
class AIModelImpl(private val context: Context) : AIModel {
private val objectDetector: Interpreter
private val labels: List<String>
init {
// Load the TFLite model from assets
val modelBuffer = FileUtil.loadMappedFile(context, "efficientdet-lite0.tflite")
val options = Interpreter.Options().apply { numThreads = 4 }
objectDetector = Interpreter(modelBuffer, options)
// Load labels from assets
labels = try {
FileUtil.loadLabels(context, "labels.txt")
} catch (e: Exception) {
e.printStackTrace()
emptyList()
}
}
override suspend fun detectObject(bitmap: Bitmap): ObjectDetectionResult? {
// Preprocess the image
val imageProcessor = ImageProcessor.Builder()
.add(ResizeOp(320, 320, ResizeOp.ResizeMethod.BILINEAR))
.build()
var tensorImage = TensorImage.fromBitmap(bitmap)
tensorImage = imageProcessor.process(tensorImage)
// Prepare model inputs and outputs
// Based on crash: [1, 25, 4] vs [1, 10, 4]. The model outputs 25 detections, not 10.
val locations = Array(1) { Array(25) { FloatArray(4) } }
val classes = Array(1) { FloatArray(25) }
val scores = Array(1) { FloatArray(25) }
val numDetections = FloatArray(1)
val outputs = mapOf(
0 to locations,
1 to classes,
2 to scores,
3 to numDetections
)
// Run inference
objectDetector.runForMultipleInputsOutputs(arrayOf(tensorImage.buffer), outputs)
// Post-process the results
val bestDetection = scores[0].withIndex()
.maxByOrNull { it.value }
?.takeIf { it.value > 0.5f } // Confidence threshold
if (bestDetection != null) {
val index = bestDetection.index
val score = bestDetection.value
val location = locations[0][index] // [ymin, xmin, ymax, xmax]
val labelIndex = classes[0][index].toInt()
val label = labels.getOrElse(labelIndex) { "Unknown" }
// Convert normalized coordinates to absolute pixel values
val ymin = location[0] * bitmap.height
val xmin = location[1] * bitmap.width
val ymax = location[2] * bitmap.height
val xmax = location[3] * bitmap.width
val boundingBox = Rect(xmin.toInt(), ymin.toInt(), xmax.toInt(), ymax.toInt())
return ObjectDetectionResult(boundingBox, label, score)
}
return null
}
// This is no longer the primary function, but kept for interface compliance
override suspend fun segmentImage(bitmap: Bitmap): Triple<Bitmap, BooleanArray, Rect>? {
// Returning null as we are focusing on object detection now
return null
}
override fun deriveInference(bitmap: Bitmap): String = "Object Detection"
}
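// Example (not part of the original diff): a minimal detection call from a coroutine,
// assuming a frame bitmap is already available.
suspend fun exampleDetect(model: AIModel, frame: Bitmap) {
    val result = model.detectObject(frame) ?: return
    println("Detected ${result.label} (${result.confidence}) at ${result.boundingBox}")
}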

View File

@ -0,0 +1,53 @@
package com.example.livingai.data.ml
import android.graphics.Bitmap
import com.example.livingai.domain.ml.DistanceState
import com.example.livingai.domain.ml.FrameData
import com.example.livingai.domain.ml.Orientation
import com.example.livingai.domain.ml.OrientationPixelEstimator
import com.example.livingai.domain.ml.OrientationState
class DistanceEstimatorImpl {
private val orientationEstimator = OrientationPixelEstimator(iouThreshold = 0.60f)
fun processFrame(
frameData: FrameData,
requestedOrientation: Orientation,
silhouetteBitmap: Bitmap
): OrientationState {
val segMaskBitmap = frameData.segmentationMaskBitmap
?: return OrientationState(
success = false,
reason = "No segmentation mask",
pixelMetrics = null,
orientationMatched = false
)
val bbox = frameData.segmentationBox
?: return OrientationState(
success = false,
reason = "No bounding box",
pixelMetrics = null,
orientationMatched = false
)
val result = orientationEstimator.analyze(
segmentationMaskBitmap = segMaskBitmap,
silhouetteBitmap = silhouetteBitmap,
bbox = bbox,
frameWidth = frameData.imageWidth,
frameHeight = frameData.imageHeight,
medianDepthMeters = frameData.medianDepth
)
return OrientationState(
success = result.orientationMatched,
reason = if (result.orientationMatched) "OK" else "Orientation mismatch",
pixelMetrics = result.pixelMetrics,
orientationMatched = result.orientationMatched,
iouScore = result.iouScore
)
}
}

View File

@ -0,0 +1,133 @@
package com.example.livingai.data.ml
import android.content.Context
import android.graphics.Bitmap
import android.graphics.Rect
import org.tensorflow.lite.Interpreter
import org.tensorflow.lite.support.common.FileUtil
import org.tensorflow.lite.support.common.ops.NormalizeOp
import org.tensorflow.lite.support.image.ImageProcessor
import org.tensorflow.lite.support.image.TensorImage
import org.tensorflow.lite.support.image.ops.ResizeOp
import java.nio.ByteBuffer
import java.nio.ByteOrder
data class MidasDepthResult(
val relativeDepth: Float,
val absoluteDistanceMeters: Float?
)
class MidasDepthEstimator(private val context: Context) {
private var interpreter: Interpreter? = null
companion object {
private const val MODEL_NAME = ""
private const val INPUT_SIZE = 256
private val NORM_MEAN = floatArrayOf(123.675f, 116.28f, 103.53f)
private val NORM_STD = floatArrayOf(58.395f, 57.12f, 57.375f)
}
init {
setupInterpreter()
}
private fun setupInterpreter() {
try {
val files = context.assets.list("") ?: emptyArray()
if (!files.contains(MODEL_NAME)) return
val model = FileUtil.loadMappedFile(context, MODEL_NAME)
interpreter = Interpreter(model, Interpreter.Options().apply { setNumThreads(4) })
} catch (e: Exception) {
e.printStackTrace()
}
}
fun analyzeObject(
bitmap: Bitmap,
bbox: Rect,
realObjectHeightMeters: Float?,
focalLengthPixels: Float?
): MidasDepthResult? {
val interp = interpreter ?: return null
try {
// 1. Preprocess
var tensorImage = TensorImage(org.tensorflow.lite.DataType.FLOAT32)
tensorImage.load(bitmap)
val processor = ImageProcessor.Builder()
.add(ResizeOp(INPUT_SIZE, INPUT_SIZE, ResizeOp.ResizeMethod.BILINEAR))
.add(NormalizeOp(NORM_MEAN, NORM_STD))
.build()
tensorImage = processor.process(tensorImage)
// 2. Output Buffer
val outShape = interp.getOutputTensor(0).shape()
val size = outShape[1] * outShape[2]
val output = ByteBuffer.allocateDirect(size * 4).order(ByteOrder.nativeOrder())
// 3. Run MiDaS
interp.run(tensorImage.buffer, output)
output.rewind()
val depthArray = FloatArray(size)
output.asFloatBuffer().get(depthArray)
// MiDaS runs on the full frame and outputs inverse depth (disparity), so a whole-frame
// median is only a proxy for scene depth. Since we want the object's depth, we sample
// the depth map at the bounding-box centre instead.
// Map BBox center to 256x256
val cx = bbox.centerX()
val cy = bbox.centerY()
val mapX = (cx * INPUT_SIZE) / bitmap.width
val mapY = (cy * INPUT_SIZE) / bitmap.height
// Clamp
val safeX = mapX.coerceIn(0, INPUT_SIZE - 1)
val safeY = mapY.coerceIn(0, INPUT_SIZE - 1)
val depthIndex = safeY * INPUT_SIZE + safeX
val objectRelativeDepth = depthArray[depthIndex]
// Note: MiDaS output is inverse depth (disparity).
// Higher value = Closer.
// 4. Absolute Distance (Pinhole)
val hPx = bbox.height().toFloat()
val absDistance = if (realObjectHeightMeters != null && focalLengthPixels != null && hPx > 0) {
(focalLengthPixels * realObjectHeightMeters) / hPx
} else {
null
}
return MidasDepthResult(
relativeDepth = objectRelativeDepth,
absoluteDistanceMeters = absDistance
)
} catch (e: Exception) {
e.printStackTrace()
return null
}
}
// Kept for compatibility if needed, but analyzeObject is the new main entry
fun estimateDepth(bitmap: Bitmap): Float? {
// Fallback or simpler version
return analyzeObject(bitmap, Rect(0,0,bitmap.width, bitmap.height), null, null)?.relativeDepth
}
}
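// Worked example (not part of the original diff): the pinhole fallback above with round
// numbers, assuming a focal length of 1500 px and a 1.4 m tall subject spanning 700 px:
//   distance = (1500 * 1.4) / 700 = 3.0 m
fun examplePinholeDistance(focalLengthPixels: Float, realHeightMeters: Float, boxHeightPx: Float): Float =
    (focalLengthPixels * realHeightMeters) / boxHeightPx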

View File

@ -0,0 +1,9 @@
package com.example.livingai.data.ml
import android.graphics.Rect
data class ObjectDetectionResult(
val boundingBox: Rect,
val label: String,
val confidence: Float
)

View File

@ -0,0 +1,60 @@
package com.example.livingai.data.repository
import androidx.datastore.core.DataStore
import androidx.datastore.preferences.core.Preferences
import androidx.datastore.preferences.core.booleanPreferencesKey
import androidx.datastore.preferences.core.edit
import androidx.datastore.preferences.core.emptyPreferences
import androidx.datastore.preferences.core.floatPreferencesKey
import androidx.datastore.preferences.core.stringPreferencesKey
import com.example.livingai.domain.model.SettingsData
import com.example.livingai.domain.repository.AppDataRepository
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.catch
import kotlinx.coroutines.flow.map
import java.io.IOException
class AppDataRepositoryImpl(private val dataStore: DataStore<Preferences>) : AppDataRepository {
private object PreferencesKeys {
val APP_ENTRY = booleanPreferencesKey("app_entry")
val LANGUAGE = stringPreferencesKey("language")
val IS_AUTO_CAPTURE_ON = booleanPreferencesKey("is_auto_capture_on")
val JACCARD_THRESHOLD = floatPreferencesKey("jaccard_threshold")
}
override fun getSettings(): Flow<SettingsData> {
return dataStore.data.catch {
if (it is IOException) {
emit(emptyPreferences())
} else {
throw it
}
}.map {
val language = it[PreferencesKeys.LANGUAGE] ?: "en"
val isAutoCaptureOn = it[PreferencesKeys.IS_AUTO_CAPTURE_ON] ?: false
val jaccardThreshold = it[PreferencesKeys.JACCARD_THRESHOLD] ?: 50f
SettingsData(language, isAutoCaptureOn, jaccardThreshold)
}
}
override suspend fun saveSettings(settings: SettingsData) {
dataStore.edit {
it[PreferencesKeys.LANGUAGE] = settings.language
it[PreferencesKeys.IS_AUTO_CAPTURE_ON] = settings.isAutoCaptureOn
it[PreferencesKeys.JACCARD_THRESHOLD] = settings.jaccardThreshold
}
}
override suspend fun saveAppEntry() {
dataStore.edit { settings ->
settings[PreferencesKeys.APP_ENTRY] = true
}
}
override fun readAppEntry(): Flow<Boolean> {
return dataStore.data.map { preferences ->
preferences[PreferencesKeys.APP_ENTRY] ?: false
}
}
}

View File

@ -0,0 +1,44 @@
package com.example.livingai.data.repository
import androidx.datastore.core.DataStore
import androidx.datastore.preferences.core.Preferences
import androidx.datastore.preferences.core.booleanPreferencesKey
import androidx.datastore.preferences.core.edit
import androidx.datastore.preferences.core.emptyPreferences
import androidx.datastore.preferences.core.stringPreferencesKey
import com.example.livingai.data.local.model.SettingsData
import com.example.livingai.domain.repository.SettingsRepository
import com.example.livingai.utils.Constants.APP_ENTRY
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.catch
import kotlinx.coroutines.flow.map
import java.io.IOException
class SettingsRepositoryImpl(private val dataStore: DataStore<Preferences>) : SettingsRepository {
private object PreferencesKeys {
val LANGUAGE = stringPreferencesKey("language")
val IS_AUTO_CAPTURE_ON = booleanPreferencesKey("is_auto_capture_on")
}
override fun getSettings(): Flow<SettingsData> {
return dataStore.data.catch {
if (it is IOException) {
emit(emptyPreferences())
} else {
throw it
}
}.map {
val language = it[PreferencesKeys.LANGUAGE] ?: "en"
val isAutoCaptureOn = it[PreferencesKeys.IS_AUTO_CAPTURE_ON] ?: false
SettingsData(language, isAutoCaptureOn)
}
}
override suspend fun saveSettings(settings: SettingsData) {
dataStore.edit {
it[PreferencesKeys.LANGUAGE] = settings.language
it[PreferencesKeys.IS_AUTO_CAPTURE_ON] = settings.isAutoCaptureOn
}
}
}

View File

@ -0,0 +1,20 @@
package com.example.livingai.data.repository.business
import com.example.livingai.domain.model.AnimalDetails
import com.example.livingai.domain.repository.business.AnimalDetailsRepository
import com.example.livingai.domain.repository.business.DataSource
import kotlinx.coroutines.flow.Flow
class AnimalDetailsRepositoryImpl(
private val dataSource: DataSource
) : AnimalDetailsRepository {
override fun getAnimalDetails(id: String): Flow<AnimalDetails?> {
return dataSource.getAnimalDetails(id)
}
override suspend fun saveAnimalDetails(animalDetails: AnimalDetails) {
dataSource.setAnimalDetails(animalDetails)
}
override suspend fun deleteAnimalDetails(id: String) { }
}

View File

@ -0,0 +1,23 @@
package com.example.livingai.data.repository.business
import androidx.paging.PagingData
import com.example.livingai.domain.model.AnimalProfile
import com.example.livingai.domain.repository.business.AnimalProfileRepository
import com.example.livingai.domain.repository.business.DataSource
import kotlinx.coroutines.flow.Flow
class AnimalProfileRepositoryImpl(
private val dataSource: DataSource
) : AnimalProfileRepository {
override fun getAnimalProfiles(): Flow<PagingData<AnimalProfile>> {
return dataSource.getAnimalProfiles()
}
override suspend fun saveAnimalProfile(animalProfile: AnimalProfile) {
dataSource.setAnimalProfile(animalProfile)
}
override suspend fun deleteAnimalProfile(id: String) {
dataSource.deleteAnimalProfile(id)
}
}

View File

@ -0,0 +1,22 @@
package com.example.livingai.data.repository.business
import com.example.livingai.domain.model.AnimalRating
import com.example.livingai.domain.repository.business.AnimalRatingRepository
import com.example.livingai.domain.repository.business.DataSource
import kotlinx.coroutines.flow.Flow
class AnimalRatingRepositoryImpl(
private val dataSource: DataSource
) : AnimalRatingRepository {
override fun getAnimalRating(id: String): Flow<AnimalRating?> {
return dataSource.getAnimalRatings(id)
}
override suspend fun saveAnimalRating(animalRating: AnimalRating) {
dataSource.setAnimalRatings(animalRating)
}
override suspend fun deleteAnimalRating(id: String) {
// Placeholder: as with details, DataSource currently only supports deleting the full profile row.
}
}

View File

@ -0,0 +1,193 @@
package com.example.livingai.data.repository.media
import android.content.ContentValues
import android.content.Context
import android.graphics.Bitmap
import android.graphics.Matrix
import android.graphics.Rect
import android.provider.MediaStore
import androidx.camera.core.ImageProxy
import com.example.livingai.data.ml.DistanceEstimatorImpl
import com.example.livingai.data.ml.MidasDepthEstimator
import com.example.livingai.domain.ml.AIModel
import com.example.livingai.domain.ml.FrameMetadataProvider
import com.example.livingai.domain.ml.FrameMetadataProvider.toFrameData
import com.example.livingai.domain.ml.Orientation
import com.example.livingai.domain.ml.OrientationState
import com.example.livingai.domain.repository.CameraRepository
import com.example.livingai.utils.TiltSensorManager
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
class CameraRepositoryImpl(
private val aiModel: AIModel,
private val tiltSensorManager: TiltSensorManager,
private val context: Context
) : CameraRepository {
private val distanceEstimator = DistanceEstimatorImpl()
private val midasEstimator = MidasDepthEstimator(context)
init {
FrameMetadataProvider.aiModel = aiModel
FrameMetadataProvider.tiltSensorManager = tiltSensorManager
}
override suspend fun captureImage(imageProxy: ImageProxy): Bitmap =
withContext(Dispatchers.IO) {
val rotation = imageProxy.imageInfo.rotationDegrees
val bitmap = imageProxy.toBitmap()
imageProxy.close()
if (rotation != 0) {
val matrix = Matrix().apply { postRotate(rotation.toFloat()) }
Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
} else bitmap
}
override suspend fun processFrame(
bitmap: Bitmap,
requestedOrientation: Orientation,
silhouetteBitmap: Bitmap,
realObjectHeightMeters: Float?,
focalLengthPixels: Float,
boundingBox: Rect?
): OrientationState = withContext(Dispatchers.Default) {
// 1. Collect segmentation
// Prefer the bounding box already produced by the object detector in CameraViewModel.
// FrameMetadataProvider.collectMetadata would re-run segmentation via aiModel.segmentImage,
// which currently returns null, so the bbox would be missing and processFrame would bail
// out with "Segmentation missing". When a boundingBox is supplied we therefore build a
// synthetic box mask from it and assemble the frame metadata manually.
val syntheticMeta = if (boundingBox != null) {
// Draw a white rectangle over a black bitmap as the mask that DistanceEstimator expects,
// and gather IMU, rotation and depth readings directly instead of calling collectMetadata.
// This runs on the Default dispatcher.
val maskBitmap = Bitmap.createBitmap(bitmap.width, bitmap.height, Bitmap.Config.ARGB_8888)
val canvas = android.graphics.Canvas(maskBitmap)
val paint = android.graphics.Paint().apply { color = android.graphics.Color.WHITE }
canvas.drawRect(boundingBox, paint)
val imu = FrameMetadataProvider.getIMU()
val rot = FrameMetadataProvider.getRotation()
val depth = FrameMetadataProvider.getDepthData()
FrameMetadataProvider.FrameCollectedMetadata(
segmentationMaskBitmap = maskBitmap,
segmentationBox = boundingBox,
depthMeters = depth.depthMeters,
depthWidth = depth.width,
depthHeight = depth.height,
depthConfidence = depth.confidence,
pitch = imu.pitch,
roll = imu.roll,
yaw = imu.yaw,
rotationDegrees = rot
)
} else {
FrameMetadataProvider.collectMetadata(bitmap)
}
val bbox = syntheticMeta.segmentationBox
// val mask = syntheticMeta.segmentationMaskBitmap // Mask is used inside distanceEstimator
if (bbox == null) {
return@withContext OrientationState(
success = false,
reason = "Segmentation missing",
pixelMetrics = null,
orientationMatched = false,
iouScore = null,
relativeDepth = null,
absoluteDistanceMeters = null
)
}
// 2. MiDaS (relative + absolute if reference height provided)
val midasResult = midasEstimator.analyzeObject(
bitmap = bitmap,
bbox = bbox,
realObjectHeightMeters = realObjectHeightMeters,
focalLengthPixels = focalLengthPixels
)
// 3. Build FrameData with relative depth only
val frameData = syntheticMeta.toFrameData(bitmap).copy(
medianDepth = midasResult?.relativeDepth
)
// 4. Orientation detection
val orientationState = distanceEstimator.processFrame(
frameData = frameData,
requestedOrientation = requestedOrientation,
silhouetteBitmap = silhouetteBitmap
)
// 5. Inject relative + absolute values into final result
orientationState.copy(
relativeDepth = midasResult?.relativeDepth,
absoluteDistanceMeters = midasResult?.absoluteDistanceMeters
)
}
override suspend fun saveImage(
bitmap: Bitmap,
animalId: String,
orientation: String?
): String = withContext(Dispatchers.IO) {
val suffix = orientation?.let { "_$it" } ?: ""
val fileName = "$animalId$suffix.jpg"
val values = ContentValues().apply {
put(MediaStore.Images.Media.DISPLAY_NAME, fileName)
put(MediaStore.Images.Media.MIME_TYPE, "image/jpeg")
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.Q) {
put(MediaStore.Images.Media.RELATIVE_PATH, "Pictures/LivingAI/Media/$animalId")
put(MediaStore.Images.Media.IS_PENDING, 1)
}
}
val resolver = context.contentResolver
val uri = resolver.insert(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values)
?: throw RuntimeException("Failed to insert image")
try {
resolver.openOutputStream(uri)?.use { out ->
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, out)
}
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.Q) {
values.clear()
values.put(MediaStore.Images.Media.IS_PENDING, 0)
resolver.update(uri, values, null, null)
}
} catch (e: Exception) {
resolver.delete(uri, null, null)
throw e
}
uri.toString()
}
}

View File

@ -0,0 +1,31 @@
package com.example.livingai.data.repository.media
import android.graphics.Bitmap
import com.example.livingai.domain.ml.AIModel
import com.example.livingai.domain.repository.VideoRepository
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.withContext
class VideoRepositoryImpl(private val aiModel: AIModel) : VideoRepository {
private var isRecording = false
override fun startRecording(onRecordingStarted: () -> Unit) {
isRecording = true
// Logic to start recording video if needed
onRecordingStarted()
}
override fun stopRecording() {
isRecording = false
// Logic to stop recording
}
override suspend fun processFrame(bitmap: Bitmap): String = withContext(Dispatchers.Default) {
if (isRecording) {
aiModel.deriveInference(bitmap)
} else {
""
}
}
}

View File

@ -0,0 +1,193 @@
package com.example.livingai.di
import android.content.Context
import android.util.Log
import androidx.datastore.core.DataStore
import androidx.datastore.preferences.core.Preferences
import androidx.datastore.preferences.preferencesDataStore
import androidx.window.layout.WindowMetricsCalculator
import coil.ImageLoader
import coil.decode.SvgDecoder
import com.example.livingai.data.camera.DefaultCaptureHandler
import com.example.livingai.data.camera.DefaultMeasurementCalculator
import com.example.livingai.data.camera.DefaultOrientationChecker
import com.example.livingai.data.camera.DefaultTiltChecker
import com.example.livingai.data.camera.MockPoseAnalyzer
import com.example.livingai.data.camera.TFLiteObjectDetector
import com.example.livingai.data.local.CSVDataSource
import com.example.livingai.data.ml.AIModelImpl
import com.example.livingai.data.repository.AppDataRepositoryImpl
import com.example.livingai.data.repository.business.AnimalDetailsRepositoryImpl
import com.example.livingai.data.repository.business.AnimalProfileRepositoryImpl
import com.example.livingai.data.repository.business.AnimalRatingRepositoryImpl
import com.example.livingai.data.repository.media.CameraRepositoryImpl
import com.example.livingai.data.repository.media.VideoRepositoryImpl
import com.example.livingai.domain.camera.CaptureHandler
import com.example.livingai.domain.camera.MeasurementCalculator
import com.example.livingai.domain.camera.OrientationChecker
import com.example.livingai.domain.camera.PoseAnalyzer
import com.example.livingai.domain.camera.TiltChecker
import com.example.livingai.domain.ml.AIModel
import com.example.livingai.domain.ml.AnalyzerThresholds
import com.example.livingai.domain.ml.FeedbackAnalyzer
import com.example.livingai.domain.ml.FeedbackAnalyzerImpl
import com.example.livingai.domain.ml.ObjectDetector
import com.example.livingai.domain.ml.ObjectDetectorImpl
import com.example.livingai.domain.repository.AppDataRepository
import com.example.livingai.domain.repository.CameraRepository
import com.example.livingai.domain.repository.VideoRepository
import com.example.livingai.domain.repository.business.AnimalDetailsRepository
import com.example.livingai.domain.repository.business.AnimalProfileRepository
import com.example.livingai.domain.repository.business.AnimalRatingRepository
import com.example.livingai.domain.repository.business.DataSource
import com.example.livingai.domain.usecases.AppDataUseCases
import com.example.livingai.domain.usecases.DeleteAnimalProfile
import com.example.livingai.domain.usecases.GetAnimalDetails
import com.example.livingai.domain.usecases.GetAnimalProfiles
import com.example.livingai.domain.usecases.GetAnimalRatings
import com.example.livingai.domain.usecases.GetSettingsUseCase
import com.example.livingai.domain.usecases.ProfileEntry.ProfileEntryUseCase
import com.example.livingai.domain.usecases.ProfileListing.ProfileListingUseCase
import com.example.livingai.domain.usecases.ReadAppEntryUseCase
import com.example.livingai.domain.usecases.SaveAppEntryUseCase
import com.example.livingai.domain.usecases.SaveSettingsUseCase
import com.example.livingai.domain.usecases.SetAnimalDetails
import com.example.livingai.domain.usecases.SetAnimalRatings
import com.example.livingai.pages.addprofile.AddProfileViewModel
import com.example.livingai.pages.camera.CameraViewModel
import com.example.livingai.pages.camera.VideoViewModel
import com.example.livingai.pages.home.HomeViewModel
import com.example.livingai.pages.imagepreview.ImagePreviewViewModel
import com.example.livingai.pages.listings.ListingsViewModel
import com.example.livingai.pages.onboarding.OnBoardingViewModel
import com.example.livingai.pages.ratings.RatingViewModel
import com.example.livingai.pages.settings.SettingsViewModel
import com.example.livingai.pages.videopreview.VideoPreviewViewModel
import com.example.livingai.utils.Constants
import com.example.livingai.utils.CoroutineDispatchers
import com.example.livingai.utils.DefaultCoroutineDispatchers
import com.example.livingai.utils.ScreenDimensions
import com.example.livingai.utils.SilhouetteManager
import com.example.livingai.utils.TiltSensorManager
import org.koin.android.ext.koin.androidContext
import org.koin.core.module.dsl.viewModel
import org.koin.dsl.module
private val Context.dataStore: DataStore<Preferences> by preferencesDataStore(name = Constants.USER_SETTINGS)
val appModule = module {
single<DataStore<Preferences>> { androidContext().dataStore }
single<AppDataRepository> { AppDataRepositoryImpl(get()) }
single {
AppDataUseCases(
getSettings = GetSettingsUseCase(get()),
saveSettings = SaveSettingsUseCase(get()),
readAppEntry = ReadAppEntryUseCase(get()),
saveAppEntry = SaveAppEntryUseCase(get())
)
}
// Coroutine dispatchers (for testability)
single<CoroutineDispatchers> { DefaultCoroutineDispatchers() }
// Data Source
single<DataSource> {
CSVDataSource(
context = androidContext(),
fileName = Constants.ANIMAL_DATA_FILENAME,
dispatchers = get()
)
}
// Coil ImageLoader singleton
single {
ImageLoader.Builder(androidContext())
.components {
add(SvgDecoder.Factory())
}
.build()
}
factory<OrientationChecker> { DefaultOrientationChecker() }
factory<TiltChecker> { DefaultTiltChecker() }
factory<com.example.livingai.domain.camera.ObjectDetector> { TFLiteObjectDetector(androidContext()) }
factory<PoseAnalyzer> { MockPoseAnalyzer() }
// Handlers
factory<CaptureHandler> { DefaultCaptureHandler() }
factory<MeasurementCalculator> { DefaultMeasurementCalculator() }
// Initialize silhouettes once
single<ScreenDimensions>(createdAtStart = true) {
val ctx: Context = androidContext()
val metrics = WindowMetricsCalculator.getOrCreate()
.computeCurrentWindowMetrics(ctx)
val bounds = metrics.bounds
val screenWidth = bounds.width()
val screenHeight = bounds.height()
SilhouetteManager.initialize(ctx, screenWidth, screenHeight)
ScreenDimensions(screenWidth, screenHeight)
}
// ML Model
single<AIModel> { AIModelImpl(androidContext()) }
single<ObjectDetector> {
ObjectDetectorImpl(
context = androidContext(),
onResults = { _, _ -> }, // Callback will be set by ViewModel
onError = { error -> Log.e("ObjectDetector", "Error: $error") }
)
}
single<FeedbackAnalyzer> { FeedbackAnalyzerImpl(AnalyzerThresholds()) }
single { TiltSensorManager(androidContext()) }
// Repositories
single<AnimalProfileRepository> { AnimalProfileRepositoryImpl(get()) }
single<AnimalDetailsRepository> { AnimalDetailsRepositoryImpl(get()) }
single<AnimalRatingRepository> { AnimalRatingRepositoryImpl(get()) }
single<CameraRepository> { CameraRepositoryImpl(get(), get(), androidContext()) }
single<VideoRepository> { VideoRepositoryImpl(get()) }
// Use Cases
single { GetAnimalProfiles(get()) }
single { GetAnimalDetails(get()) }
single { GetAnimalRatings(get()) }
single { SetAnimalDetails(get()) }
single { SetAnimalRatings(get()) }
single { DeleteAnimalProfile(get()) }
// Composite use cases
single {
ProfileEntryUseCase(
getAnimalDetails = GetAnimalDetails(get()),
setAnimalDetails = SetAnimalDetails(get())
)
}
single {
ProfileListingUseCase(
getAnimalProfiles = GetAnimalProfiles(get()),
deleteAnimalProfile = DeleteAnimalProfile(get())
)
}
// ViewModels
viewModel { HomeViewModel(get()) }
viewModel { OnBoardingViewModel(get()) }
viewModel { (savedStateHandle: androidx.lifecycle.SavedStateHandle?) ->
AddProfileViewModel(get(), get(), savedStateHandle)
}
viewModel { ListingsViewModel(get()) }
viewModel { SettingsViewModel(get()) }
viewModel { RatingViewModel(get(), get(), get(), get()) }
viewModel { CameraViewModel(get(), get(), get(), get(), get(), get(), get(), get()) }
viewModel { VideoViewModel(get(), get(), get()) }
viewModel { ImagePreviewViewModel() }
viewModel { VideoPreviewViewModel() }
}
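// Example (not part of the original diff): a minimal sketch of starting Koin with these
// modules from an Application subclass; the class name here is hypothetical.
class ExampleLivingAiApp : android.app.Application() {
    override fun onCreate() {
        super.onCreate()
        org.koin.core.context.startKoin {
            androidContext(this@ExampleLivingAiApp)
            modules(appModule, cameraModule)
        }
    }
}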

View File

@ -0,0 +1,11 @@
package com.example.livingai.di
import com.example.livingai.data.camera.*
import com.example.livingai.domain.camera.*
import com.example.livingai.utils.ScreenDimensions
import org.koin.android.ext.koin.androidContext
import org.koin.dsl.module
val cameraModule = module {
// Pipeline Steps
}

View File

@ -0,0 +1,61 @@
package com.example.livingai.domain.camera
import android.graphics.Bitmap
import com.example.livingai.domain.model.camera.CameraOrientation
import com.example.livingai.domain.model.camera.CaptureData
import com.example.livingai.domain.model.camera.DetectionResult
import com.example.livingai.domain.model.camera.Instruction
import com.example.livingai.domain.model.camera.ObjectMetrics
import com.example.livingai.domain.model.camera.ReferenceObject
interface CameraPipelineStep {
/**
* Analyzes the current frame (or sensor data) and returns an instruction.
*/
suspend fun analyze(input: PipelineInput): Instruction
}
data class PipelineInput(
val image: Bitmap?,
val deviceOrientation: Int, // degrees
val deviceRoll: Float,
val devicePitch: Float,
val deviceAzimuth: Float,
val requiredOrientation: CameraOrientation,
val screenWidthPx: Float,
val screenHeightPx: Float,
val targetAnimal: String, // e.g., "Dog", "Cat"
val orientation: String, // "front", "back", "side", etc.
val previousDetectionResult: DetectionResult? = null // To pass detection result to subsequent steps
)
interface OrientationChecker : CameraPipelineStep
interface TiltChecker : CameraPipelineStep
interface ObjectDetector : CameraPipelineStep
interface PoseAnalyzer : CameraPipelineStep
interface CaptureHandler {
suspend fun capture(input: PipelineInput, detectionResult: DetectionResult): CaptureData
}
interface MeasurementCalculator {
/**
* Calculates the real world dimensions of the animal based on a known reference object dimension.
* @param targetHeight The real height of the reference object provided by the user.
* @param referenceObject The reference object selected by the user.
* @param currentMetrics The current relative metrics of the animal.
* @return The calculated real-world metrics for the animal.
*/
fun calculateRealMetrics(
targetHeight: Float,
referenceObject: ReferenceObject,
currentMetrics: ObjectMetrics
): RealWorldMetrics
}
data class RealWorldMetrics(
val height: Float,
val width: Float,
val distance: Float,
val unit: String = "cm"
)
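// Example (not part of the original diff): one hedged way the pipeline steps could be run in
// sequence; how each Instruction is interpreted is left to the caller.
suspend fun exampleRunPipeline(steps: List<CameraPipelineStep>, input: PipelineInput): List<Instruction> =
    steps.map { it.analyze(input) }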

View File

@ -0,0 +1,8 @@
package com.example.livingai.domain.manager
import kotlinx.coroutines.flow.Flow
interface LocalUserManager {
suspend fun saveAppEntry()
fun readAppEntry(): Flow<Boolean>
}

View File

@ -0,0 +1,11 @@
package com.example.livingai.domain.ml
import android.graphics.Bitmap
import android.graphics.Rect
import com.example.livingai.data.ml.ObjectDetectionResult
interface AIModel {
fun deriveInference(bitmap: Bitmap): String
suspend fun segmentImage(bitmap: Bitmap): Triple<Bitmap, BooleanArray, Rect>?
suspend fun detectObject(bitmap: Bitmap): ObjectDetectionResult?
}

View File

@ -0,0 +1,152 @@
package com.example.livingai.domain.ml
import android.graphics.Rect
import com.example.livingai.utils.Constants
import kotlin.math.abs
import kotlin.math.max
import kotlin.math.min
class ArcoreDepthEstimator(
private val params: ArcoreDepthParams = ArcoreDepthParams()
) : DistanceEstimator {
data class ArcoreDepthParams(
val targetDistanceMeters: Float = Constants.TARGET_DISTANCE_METERS,
val strictToleranceMeters: Float = Constants.DISTANCE_TOLERANCE_METERS_STRICT,
val relaxedToleranceMeters: Float = Constants.DISTANCE_TOLERANCE_METERS_RELAXED,
val depthConfidenceMin: Float = Constants.DEPTH_CONFIDENCE_MIN,
val minDepthSamples: Int = Constants.MIN_DEPTH_SAMPLES
)
override fun analyze(frame: FrameData, cameraInfo: CameraInfoData): DistanceState {
val isTilted = abs(frame.imuPitchDegrees) > Constants.MAX_ACCEPTABLE_PITCH_DEGREES ||
abs(frame.imuRollDegrees) > Constants.MAX_ACCEPTABLE_ROLL_DEGREES
val isRotated = (frame.cameraRotationDegrees % 360) != 0
val isOrientationCorrect = !isTilted && !isRotated
val centered = checkCentered(frame, cameraInfo)
val depthEstimate = sampleDepthMedian(frame)
val fallbackEstimate = computeKnownDimensionEstimate(frame, cameraInfo)
val fusedDistance = when {
depthEstimate != null ->
Constants.WEIGHT_ARCORE * depthEstimate +
Constants.WEIGHT_KNOWN_DIM * (fallbackEstimate ?: depthEstimate)
fallbackEstimate != null -> fallbackEstimate
else -> null
}
val (recommendation, ready, conf) =
evaluateDistanceAndReadiness(fusedDistance, centered, isOrientationCorrect)
return DistanceState(
distanceMeters = fusedDistance,
recommendation = recommendation,
isCameraTilted = isTilted,
isCameraRotated = isRotated,
isOrientationCorrect = isOrientationCorrect,
isObjectCentered = centered,
readyToCapture = ready,
confidenceScore = conf
)
}
private fun sampleDepthMedian(frame: FrameData): Float? {
val depth = frame.depthMapMeters ?: return null
val w = frame.depthWidth
val h = frame.depthHeight
if (w <= 0 || h <= 0) return null
val box = frame.segmentationBox ?: return null
val left = max(0, box.left)
val top = max(0, box.top)
val right = min(w - 1, box.right)
val bottom = min(h - 1, box.bottom)
val conf = frame.depthConfidence
val samples = ArrayList<Float>()
for (y in top..bottom) {
for (x in left..right) {
val idx = y * w + x
val d = depth[idx]
if (d <= 0f || d.isNaN()) continue
if (conf != null && conf[idx] < params.depthConfidenceMin)
continue
samples.add(d)
}
}
if (samples.size < params.minDepthSamples) return null
samples.sort()
return samples[samples.size / 2]
}
private fun computeKnownDimensionEstimate(
frame: FrameData,
cameraInfo: CameraInfoData
): Float? {
val box = frame.segmentationBox ?: return null
val hPixels = box.height().toFloat()
if (hPixels <= 1f) return null
val f = cameraInfo.focalLengthPixels
val Hreal = Constants.DEFAULT_OBJECT_REAL_HEIGHT_METERS
return (f * Hreal) / hPixels
}
private fun evaluateDistanceAndReadiness(
distMeters: Float?,
centered: Boolean,
orientationOk: Boolean
): Triple<DistanceRecommendation, Boolean, Float> {
if (distMeters == null)
return Triple(DistanceRecommendation.DISTANCE_UNKNOWN, false, 0f)
val diff = distMeters - params.targetDistanceMeters
val absDiff = abs(diff)
val withinStrict = absDiff <= params.strictToleranceMeters
val withinRelaxed = absDiff <= params.relaxedToleranceMeters
val recommendation = when {
withinStrict -> DistanceRecommendation.AT_OPTIMAL_DISTANCE
diff > 0f -> DistanceRecommendation.MOVE_CLOSER
else -> DistanceRecommendation.MOVE_AWAY
}
val ready = withinRelaxed && centered && orientationOk
val closenessScore = 1f - (absDiff / (params.relaxedToleranceMeters * 4f))
val confidence = closenessScore * 0.8f +
(if (centered && orientationOk) 0.2f else 0f)
return Triple(recommendation, ready, confidence)
}
private fun checkCentered(frame: FrameData, cameraInfo: CameraInfoData): Boolean {
val box = frame.segmentationBox ?: return false
val imgW = cameraInfo.sensorWidthPx
val imgH = cameraInfo.sensorHeightPx
val cxObj = (box.left + box.right) / 2f
val cyObj = (box.top + box.bottom) / 2f
val cx = imgW / 2f
val cy = imgH / 2f
val dx = abs(cxObj - cx) / imgW
val dy = abs(cyObj - cy) / imgH
return dx <= Constants.CENTER_TOLERANCE_X_FRACTION &&
dy <= Constants.CENTER_TOLERANCE_Y_FRACTION
}
}
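// Worked example (not part of the original diff): the depth fusion above with hypothetical
// weights WEIGHT_ARCORE = 0.7 and WEIGHT_KNOWN_DIM = 0.3 (the real values live in Constants):
//   depthEstimate = 2.8 m, fallbackEstimate = 3.2 m
//   fused = 0.7 * 2.8 + 0.3 * 3.2 = 2.92 m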

View File

@ -0,0 +1,21 @@
package com.example.livingai.domain.ml
/**
* Singleton provider of camera intrinsic data.
* Must be initialized once per session.
*/
object CameraInfoProvider {
@Volatile
private var cameraInfoData: CameraInfoData? = null
fun init(info: CameraInfoData) {
cameraInfoData = info
}
fun get(): CameraInfoData {
return cameraInfoData
?: throw IllegalStateException("CameraInfoProvider not initialized")
}
fun tryGet(): CameraInfoData? = cameraInfoData
}

View File

@ -0,0 +1,77 @@
package com.example.livingai.domain.ml
import android.content.Context
import android.graphics.Rect
import android.hardware.camera2.CameraCharacteristics
import android.hardware.camera2.CameraManager
import android.util.Size
import android.util.SizeF
/**
* Utility to read camera intrinsics from Camera2 and compute focal length (pixels).
*
* Usage:
* val (fPx, imgW, imgH) = CameraIntrinsicsFetcher.fetch(context, cameraId, imageSize)
* CameraInfoProvider.init(CameraInfoData(fPx, imgW, imgH, px, py, ...))
*
* imageSize = the resolution you will actually receive from the ImageReader / CameraX output (width,height)
*
* Formula:
* f_px = f_mm / sensorWidth_mm * imageWidth_px
*
* More accurate: use activeArray size mapping to sensor physical size if needed.
*/
object CameraIntrinsicsFetcher {
data class Result(
val focalLengthPixels: Float,
val imageWidthPx: Int,
val imageHeightPx: Int,
val principalPointX: Float,
val principalPointY: Float,
val sensorPhysicalSizeMm: SizeF?
)
/**
* cameraId = device camera id (get from CameraManager)
* imageSize = the actual output image size you will capture (e.g., 1920x1080)
*/
fun fetch(context: Context, cameraId: String, imageSize: Size): Result {
val mgr = context.getSystemService(Context.CAMERA_SERVICE) as CameraManager
val characteristics = mgr.getCameraCharacteristics(cameraId)
val focalLengths = characteristics.get(CameraCharacteristics.LENS_INFO_AVAILABLE_FOCAL_LENGTHS)
val fMm = when {
focalLengths != null && focalLengths.isNotEmpty() -> focalLengths[0] // mm
else -> 4.0f
}
val sensorSize = characteristics.get(CameraCharacteristics.SENSOR_INFO_PHYSICAL_SIZE) // in mm
val sensorSizeMm = sensorSize
// active array size gives pixel array cropping of sensor -> map principal point
val activeRect = characteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE) // Rect
val activeRectW = activeRect?.width() ?: imageSize.width
val activeRectH = activeRect?.height() ?: imageSize.height
// Compute focal in pixels: ratio f_mm / sensorWidth_mm * imageWidth_px
val fPx = if (sensorSizeMm != null && sensorSizeMm.width > 0f) {
(fMm / sensorSizeMm.width) * imageSize.width
} else {
// fallback: estimate based on sensor pixel array
(fMm / 4.0f) * imageSize.width
}
val principalX = (activeRect?.centerX() ?: imageSize.width / 2).toFloat()
val principalY = (activeRect?.centerY() ?: imageSize.height / 2).toFloat()
return Result(
focalLengthPixels = fPx,
imageWidthPx = imageSize.width,
imageHeightPx = imageSize.height,
principalPointX = principalX,
principalPointY = principalY,
sensorPhysicalSizeMm = sensorSizeMm
)
}
}
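// Worked example (not part of the original diff): the focal-length conversion above with
// typical numbers, assuming f = 4.38 mm, a 5.6 mm wide sensor and 1920x1080 output:
//   f_px = (4.38 / 5.6) * 1920 ≈ 1502 px
// A minimal wiring sketch (camera id "0" is an assumption):
fun exampleInitIntrinsics(context: Context) {
    val r = CameraIntrinsicsFetcher.fetch(context, cameraId = "0", imageSize = Size(1920, 1080))
    CameraInfoProvider.init(
        CameraInfoData(
            focalLengthPixels = r.focalLengthPixels,
            sensorWidthPx = r.imageWidthPx,
            sensorHeightPx = r.imageHeightPx,
            principalPointX = r.principalPointX,
            principalPointY = r.principalPointY
        )
    )
}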

View File

@ -0,0 +1,52 @@
package com.example.livingai.domain.ml
import android.graphics.Bitmap
import android.graphics.Rect
/**
* Interface for all distance estimators.
*/
interface DistanceEstimator {
fun analyze(
frame: FrameData,
cameraInfo: CameraInfoData
): DistanceState
}
// FrameData is defined in FrameData.kt
/**
* Singleton-provided camera intrinsics for metric calculations.
*/
data class CameraInfoData(
val focalLengthPixels: Float, // fx in pixels
val sensorWidthPx: Int,
val sensorHeightPx: Int,
val principalPointX: Float,
val principalPointY: Float,
val distortionCoeffs: FloatArray? = null,
val cameraModel: String? = null
)
/**
* Output state describing computed distance and capture readiness.
*/
data class DistanceState(
val distanceMeters: Float?,
val recommendation: DistanceRecommendation,
val isCameraTilted: Boolean,
val isCameraRotated: Boolean,
val isOrientationCorrect: Boolean,
val isObjectCentered: Boolean,
val readyToCapture: Boolean,
val confidenceScore: Float = 0f
)
enum class DistanceRecommendation {
MOVE_CLOSER,
MOVE_AWAY,
AT_OPTIMAL_DISTANCE,
DISTANCE_UNKNOWN
}

View File

@ -0,0 +1,158 @@
package com.example.livingai.domain.ml
import android.graphics.RectF
import android.util.Log
import com.example.livingai.utils.Constants
import kotlin.math.abs
import kotlin.math.max
import kotlin.math.sin
// ------------------------------------------------------------
// CONFIG CLASS
// ------------------------------------------------------------
data class AnalyzerThresholds(
val toleranceRatio: Float = 0.02f,
// Height estimation
val minTargetHeightMeters: Float = 0.60f,
val maxTargetHeightMeters: Float = 0.70f,
// Real physical height of subject
val subjectRealHeightMeters: Float = 1.55f
)
// ------------------------------------------------------------
// STATES
// ------------------------------------------------------------
sealed class FeedbackState(val message: String) {
object Idle : FeedbackState("")
object Searching : FeedbackState("Searching for subject...")
object TooFar : FeedbackState("Move closer")
object TooClose : FeedbackState("Move back")
object TooLow : FeedbackState("Raise phone")
object TooHigh : FeedbackState("Lower phone")
object PhoneTooLow : FeedbackState("Raise phone to 60 to 70 cm from ground")
object PhoneTooHigh : FeedbackState("Lower phone to 60 to 70 cm from ground")
object Optimal : FeedbackState("Hold still")
}
// ------------------------------------------------------------
// ANALYZER INTERFACE
// ------------------------------------------------------------
interface FeedbackAnalyzer {
fun analyze(
detection: ObjectDetector.DetectionResult?,
frameWidth: Int,
frameHeight: Int,
screenHeight: Int,
tiltDegrees: Float,
focalLengthPx: Float
): FeedbackState
}
// ------------------------------------------------------------
// IMPLEMENTATION
// ------------------------------------------------------------
class FeedbackAnalyzerImpl(
private val thresholds: AnalyzerThresholds
) : FeedbackAnalyzer {
override fun analyze(
detection: ObjectDetector.DetectionResult?,
frameWidth: Int,
frameHeight: Int,
screenHeight: Int,
tiltDegrees: Float,
focalLengthPx: Float
): FeedbackState {
if (detection == null) return FeedbackState.Searching
if (frameWidth <= 0 || frameHeight <= 0) return FeedbackState.Idle
val pc = Precomputed(
detection.boundingBox,
frameWidth,
frameHeight,
screenHeight,
thresholds
)
val cameraHeight = estimateCameraHeight(pc.detectionHeight, tiltDegrees, focalLengthPx)
Log.d("FeedbackAnalyzerImpl", "Camera Height: $cameraHeight")
return when {
// ORDER MATTERS — evaluate alignment first
isTooHigh(pc) -> FeedbackState.TooHigh
isTooLow(pc) -> FeedbackState.TooLow
isTooClose(pc) -> FeedbackState.TooClose
isTooFar(pc) -> FeedbackState.TooFar
// Height estimation last
isHeightTooLow(cameraHeight) -> FeedbackState.PhoneTooLow
isHeightTooHigh(cameraHeight) -> FeedbackState.PhoneTooHigh
else -> FeedbackState.Optimal
}
}
private fun isTooLow(pc: Precomputed): Boolean =
pc.isBottomInside && !pc.isTopInside && ((pc.frameTop - pc.detectionTop) > pc.tolerance)
private fun isTooHigh(pc: Precomputed): Boolean =
!pc.isBottomInside && pc.isTopInside && ((pc.detectionBottom - pc.frameBottom) > pc.tolerance)
// OBJECT TOO CLOSE (bigger than allowed)
private fun isTooClose(pc: Precomputed): Boolean =
!pc.isTopInside && !pc.isBottomInside && ((pc.detectionHeight - pc.frameHeight) > pc.tolerance)
// OBJECT TOO FAR (too small)
private fun isTooFar(pc: Precomputed): Boolean =
pc.isTopInside && pc.isBottomInside &&
((pc.frameHeight - pc.detectionHeight) > pc.tolerance)
private fun isHeightTooLow(heightMeters: Float): Boolean =
heightMeters > 0 &&
(thresholds.minTargetHeightMeters > heightMeters)
private fun isHeightTooHigh(heightMeters: Float): Boolean =
heightMeters > (thresholds.maxTargetHeightMeters)
private fun estimateCameraHeight(
pixelHeight: Float,
tiltDegrees: Float,
focalLengthPx: Float
): Float {
val tiltRad = Math.toRadians(tiltDegrees.toDouble())
val realHeight = thresholds.subjectRealHeightMeters
if (pixelHeight <= 0f || focalLengthPx <= 0f) return -1f
val distance = (realHeight * focalLengthPx) / pixelHeight
Log.d("FeedbackAnalyzerImpl", "Distance: $distance")
return (distance * sin(tiltRad)).toFloat()
}
private data class Precomputed(
val box: RectF,
val frameWidth: Int,
val frameHeight: Int,
val screenHeight: Int,
val t: AnalyzerThresholds
) {
private val modelFrameHeight = Constants.MODEL_HEIGHT
private val scaleSlip = ((screenHeight - modelFrameHeight) * screenHeight) / (2F * modelFrameHeight)
val detectionTop = (box.top * screenHeight / modelFrameHeight) - scaleSlip
val detectionBottom = (box.bottom * screenHeight / modelFrameHeight) - scaleSlip
val detectionHeight = max(0f, detectionBottom - detectionTop)
// Frame centered vertically
val frameTop = (screenHeight - frameHeight) / 2f
val frameBottom = frameTop + frameHeight
val tolerance = t.toleranceRatio * screenHeight
// Inside checks (tolerance is applied separately in the state checks above)
val isTopInside = detectionTop >= frameTop
val isBottomInside = detectionBottom <= frameBottom
}
}
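A rough worked example of estimateCameraHeight; subjectRealHeightMeters is the default threshold, while the focal length, pixel height and tilt are assumed values for illustration:
// Illustrative numbers only; focal length, pixel height and tilt are assumptions.
val realHeight = 1.55f     // thresholds.subjectRealHeightMeters default
val focalLengthPx = 1500f  // assumed focal length in pixels
val pixelHeight = 775f     // assumed detected subject height in pixels
val tiltDegrees = 15f      // assumed device pitch
val distance = (realHeight * focalLengthPx) / pixelHeight  // = 3.0 m
val cameraHeight = (distance * kotlin.math.sin(Math.toRadians(tiltDegrees.toDouble()))).toFloat()  // ≈ 0.78 m
// 0.78 m exceeds maxTargetHeightMeters (0.70 m), so, assuming the alignment and
// distance checks pass, the analyzer would return FeedbackState.PhoneTooHigh.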

View File

@ -0,0 +1,32 @@
package com.example.livingai.domain.ml
import android.graphics.Bitmap
import android.graphics.Rect
/**
* Frame-specific data for one inference cycle.
*/
data class FrameData(
val imageBitmap: Bitmap?,
val segmentationBox: Rect?,
val segmentationMaskBitmap: Bitmap?,
// Optional ARCore depth inputs
val depthMapMeters: FloatArray?, // row-major, size = rows * cols
val depthWidth: Int = 0,
val depthHeight: Int = 0,
val depthConfidence: FloatArray? = null,
// IMU orientation
val imuPitchDegrees: Float = 0f,
val imuRollDegrees: Float = 0f,
val imuYawDegrees: Float = 0f,
val cameraRotationDegrees: Int = 0,
val timestampMs: Long = System.currentTimeMillis(),
// Source frame dimensions and optional precomputed median depth
val imageWidth: Int = 0,
val imageHeight: Int = 0,
val medianDepth: Float? = null
)

View File

@ -0,0 +1,194 @@
package com.example.livingai.domain.ml
import android.graphics.Bitmap
import android.graphics.Rect
import com.example.livingai.utils.TiltSensorManager
object FrameMetadataProvider {
lateinit var aiModel: AIModel // injected once from AppModule
var tiltSensorManager: TiltSensorManager? = null
// External data sources
var latestDepthResult: DepthResult? = null
var deviceRotation: Int = 0
suspend fun getSegmentation(bitmap: Bitmap): SegmentationResult? {
return try {
val (maskBitmap, booleanMask, bbox) = aiModel.segmentImage(bitmap) ?: return null
SegmentationResult(maskBitmap, booleanMask, bbox)
} catch (_: Exception) {
null
}
}
data class SegmentationResult(
val maskBitmap: Bitmap?,
val mask: BooleanArray,
val boundingBox: Rect
) {
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (javaClass != other?.javaClass) return false
other as SegmentationResult
if (maskBitmap != other.maskBitmap) return false
if (!mask.contentEquals(other.mask)) return false
if (boundingBox != other.boundingBox) return false
return true
}
override fun hashCode(): Int {
var result = maskBitmap?.hashCode() ?: 0
result = 31 * result + mask.contentHashCode()
result = 31 * result + boundingBox.hashCode()
return result
}
}
fun getDepthData(): DepthResult {
return latestDepthResult ?: DepthResult(null, 0, 0, null)
}
data class DepthResult(
val depthMeters: FloatArray?,
val width: Int,
val height: Int,
val confidence: FloatArray?
) {
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (javaClass != other?.javaClass) return false
other as DepthResult
if (depthMeters != null) {
if (other.depthMeters == null) return false
if (!depthMeters.contentEquals(other.depthMeters)) return false
} else if (other.depthMeters != null) return false
if (width != other.width) return false
if (height != other.height) return false
if (confidence != null) {
if (other.confidence == null) return false
if (!confidence.contentEquals(other.confidence)) return false
} else if (other.confidence != null) return false
return true
}
override fun hashCode(): Int {
var result = depthMeters?.contentHashCode() ?: 0
result = 31 * result + width
result = 31 * result + height
result = 31 * result + (confidence?.contentHashCode() ?: 0)
return result
}
}
fun getIMU(): IMUResult {
val (pitch, roll, yaw) = tiltSensorManager?.tilt?.value ?: Triple(0f, 0f, 0f)
return IMUResult(pitch, roll, yaw)
}
data class IMUResult(val pitch: Float, val roll: Float, val yaw: Float)
fun getRotation(): Int {
return deviceRotation
}
data class FrameCollectedMetadata(
val segmentationMaskBitmap: Bitmap?,
val segmentationBox: Rect?,
val depthMeters: FloatArray?,
val depthWidth: Int,
val depthHeight: Int,
val depthConfidence: FloatArray?,
val pitch: Float,
val roll: Float,
val yaw: Float,
val rotationDegrees: Int
) {
override fun equals(other: Any?): Boolean {
if (this === other) return true
if (javaClass != other?.javaClass) return false
other as FrameCollectedMetadata
if (segmentationMaskBitmap != other.segmentationMaskBitmap) return false
if (segmentationBox != other.segmentationBox) return false
if (depthMeters != null) {
if (other.depthMeters == null) return false
if (!depthMeters.contentEquals(other.depthMeters)) return false
} else if (other.depthMeters != null) return false
if (depthWidth != other.depthWidth) return false
if (depthHeight != other.depthHeight) return false
if (depthConfidence != null) {
if (other.depthConfidence == null) return false
if (!depthConfidence.contentEquals(other.depthConfidence)) return false
} else if (other.depthConfidence != null) return false
if (pitch != other.pitch) return false
if (roll != other.roll) return false
if (yaw != other.yaw) return false
if (rotationDegrees != other.rotationDegrees) return false
return true
}
override fun hashCode(): Int {
var result = segmentationMaskBitmap?.hashCode() ?: 0
result = 31 * result + (segmentationBox?.hashCode() ?: 0)
result = 31 * result + (depthMeters?.contentHashCode() ?: 0)
result = 31 * result + depthWidth
result = 31 * result + depthHeight
result = 31 * result + (depthConfidence?.contentHashCode() ?: 0)
result = 31 * result + pitch.hashCode()
result = 31 * result + roll.hashCode()
result = 31 * result + yaw.hashCode()
result = 31 * result + rotationDegrees
return result
}
}
suspend fun collectMetadata(bitmap: Bitmap): FrameCollectedMetadata {
val seg = getSegmentation(bitmap)
val depth = getDepthData()
val imu = getIMU()
val rot = getRotation()
return FrameCollectedMetadata(
segmentationMaskBitmap = seg?.maskBitmap,
segmentationBox = seg?.boundingBox,
depthMeters = depth.depthMeters,
depthWidth = depth.width,
depthHeight = depth.height,
depthConfidence = depth.confidence,
pitch = imu.pitch,
roll = imu.roll,
yaw = imu.yaw,
rotationDegrees = rot
)
}
fun FrameCollectedMetadata.toFrameData(bitmap: Bitmap): FrameData {
return FrameData(
imageBitmap = bitmap,
segmentationBox = segmentationBox,
segmentationMaskBitmap = segmentationMaskBitmap,
depthMapMeters = depthMeters,
depthWidth = depthWidth,
depthHeight = depthHeight,
depthConfidence = depthConfidence,
imuPitchDegrees = pitch,
imuRollDegrees = roll,
imuYawDegrees = yaw,
cameraRotationDegrees = rotationDegrees,
// New fields populated from bitmap if available or passed down
imageWidth = bitmap.width,
imageHeight = bitmap.height,
medianDepth = null // Can calculate median from depthMeters if needed
)
}
}
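A sketch of how the provider might be used from a frame-processing coroutine once aiModel and tiltSensorManager have been injected; the wrapper function and bitmap source are placeholders:
// Hypothetical call site (e.g. inside an image-analysis coroutine).
suspend fun buildFrameData(bitmap: android.graphics.Bitmap): FrameData {
    // Assumes FrameMetadataProvider.aiModel and tiltSensorManager were set at startup.
    val metadata = FrameMetadataProvider.collectMetadata(bitmap)
    // toFrameData is a member extension of the provider, so bring it into scope with `with`.
    return with(FrameMetadataProvider) { metadata.toFrameData(bitmap) }
}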

View File

@ -0,0 +1,103 @@
package com.example.livingai.domain.ml
import com.example.livingai.utils.Constants
import kotlin.math.abs
import kotlin.math.min
class KnownDimensionEstimator(
private val params: KnownDimensionParams = KnownDimensionParams()
) : DistanceEstimator {
data class KnownDimensionParams(
val knownObjectHeightMeters: Float = Constants.DEFAULT_OBJECT_REAL_HEIGHT_METERS,
val targetDistanceMeters: Float = Constants.TARGET_DISTANCE_METERS,
val strictToleranceMeters: Float = Constants.DISTANCE_TOLERANCE_METERS_STRICT,
val relaxedToleranceMeters: Float = Constants.DISTANCE_TOLERANCE_METERS_RELAXED
)
override fun analyze(frame: FrameData, cameraInfo: CameraInfoData): DistanceState {
val tilted = abs(frame.imuPitchDegrees) > Constants.MAX_ACCEPTABLE_PITCH_DEGREES ||
abs(frame.imuRollDegrees) > Constants.MAX_ACCEPTABLE_ROLL_DEGREES
val rotated = (frame.cameraRotationDegrees % 360) != 0
val orientationOk = !tilted && !rotated
val centered = checkCentered(frame, cameraInfo)
val distEstimate = computeDistance(frame, cameraInfo)
val (recommendation, ready, conf) =
evaluateDistanceAndReadiness(distEstimate, centered, orientationOk)
return DistanceState(
distanceMeters = distEstimate,
recommendation = recommendation,
isCameraTilted = tilted,
isCameraRotated = rotated,
isOrientationCorrect = orientationOk,
isObjectCentered = centered,
readyToCapture = ready,
confidenceScore = conf
)
}
private fun computeDistance(
frame: FrameData,
cameraInfo: CameraInfoData
): Float? {
val box = frame.segmentationBox ?: return null
val hPx = box.height().toFloat()
if (hPx <= 1f) return null
val f = cameraInfo.focalLengthPixels
return (f * params.knownObjectHeightMeters) / hPx
}
private fun evaluateDistanceAndReadiness(
distMeters: Float?,
centered: Boolean,
orientationOk: Boolean
): Triple<DistanceRecommendation, Boolean, Float> {
if (distMeters == null)
return Triple(DistanceRecommendation.DISTANCE_UNKNOWN, false, 0f)
val diff = distMeters - params.targetDistanceMeters
val absDiff = abs(diff)
val withinStrict = absDiff <= params.strictToleranceMeters
val withinRelaxed = absDiff <= params.relaxedToleranceMeters
val recommendation = when {
withinStrict -> DistanceRecommendation.AT_OPTIMAL_DISTANCE
diff > 0f -> DistanceRecommendation.MOVE_CLOSER
else -> DistanceRecommendation.MOVE_AWAY
}
val ready = withinRelaxed && centered && orientationOk
val closenessScore = 1f - min(1f, absDiff / (params.relaxedToleranceMeters * 4f))
val conf = closenessScore * 0.9f +
(if (centered && orientationOk) 0.1f else 0f)
return Triple(recommendation, ready, conf)
}
private fun checkCentered(frame: FrameData, cameraInfo: CameraInfoData): Boolean {
val box = frame.segmentationBox ?: return false
val imgW = cameraInfo.sensorWidthPx
val imgH = cameraInfo.sensorHeightPx
val objCx = (box.left + box.right) / 2f
val objCy = (box.top + box.bottom) / 2f
val cx = imgW / 2f
val cy = imgH / 2f
val dx = abs(objCx - cx) / imgW
val dy = abs(objCy - cy) / imgH
return dx <= Constants.CENTER_TOLERANCE_X_FRACTION &&
dy <= Constants.CENTER_TOLERANCE_Y_FRACTION
}
}
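For intuition, computeDistance above is the pinhole relation d = f_px * H_real / h_px; a quick illustrative calculation with assumed values (not the Constants defaults):
// Illustrative only; the focal length, subject height and box height are assumptions.
val focalLengthPixels = 1400f       // assumed fx in pixels
val knownObjectHeightMeters = 1.4f  // assumed real-world subject height
val boxHeightPx = 560f              // assumed segmentation box height in pixels
val distanceMeters = (focalLengthPixels * knownObjectHeightMeters) / boxHeightPx  // = 3.5 m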

View File

@ -0,0 +1,12 @@
package com.example.livingai.domain.ml
import android.graphics.Bitmap
import android.graphics.RectF
interface ObjectDetector {
var onResults: (List<DetectionResult>, Long) -> Unit
var onError: (String) -> Unit
fun detect(bitmap: Bitmap, imageRotation: Int)
data class DetectionResult(val boundingBox: RectF, val text: String, val score: Float)
}

View File

@ -0,0 +1,119 @@
package com.example.livingai.domain.ml
import android.content.Context
import android.graphics.Bitmap
import android.graphics.RectF
import org.tensorflow.lite.DataType
import org.tensorflow.lite.Interpreter
import org.tensorflow.lite.support.image.ImageProcessor
import org.tensorflow.lite.support.image.TensorImage
import org.tensorflow.lite.support.image.ops.ResizeOp
import org.tensorflow.lite.support.image.ops.Rot90Op
import java.io.FileInputStream
import java.nio.channels.FileChannel
class ObjectDetectorImpl(
private val context: Context,
override var onResults: (List<ObjectDetector.DetectionResult>, Long) -> Unit,
override var onError: (String) -> Unit
) : ObjectDetector {
private var interpreter: Interpreter? = null
private val modelName = "efficientdet-lite0.tflite"
private val inputSize = 320 // EfficientDet-Lite0 expects 320x320
init {
setupInterpreter()
}
private fun setupInterpreter() {
try {
// Memory-map the model; use {} closes the descriptor and stream once the buffer is mapped.
val modelBuffer = context.assets.openFd(modelName).use { afd ->
FileInputStream(afd.fileDescriptor).use { fis ->
fis.channel.map(FileChannel.MapMode.READ_ONLY, afd.startOffset, afd.declaredLength)
}
}
val options = Interpreter.Options()
options.setNumThreads(4)
interpreter = Interpreter(modelBuffer, options)
} catch (e: Exception) {
onError(e.message ?: "Error loading model")
}
}
override fun detect(bitmap: Bitmap, imageRotation: Int) {
val tflite = interpreter ?: return
val startTime = System.currentTimeMillis()
// 1. Preprocess Image
// Rotate -> Resize -> Convert to TensorImage (UINT8)
val imageProcessor = ImageProcessor.Builder()
.add(Rot90Op(-imageRotation / 90))
.add(ResizeOp(inputSize, inputSize, ResizeOp.ResizeMethod.BILINEAR))
.build()
var tensorImage = TensorImage(DataType.UINT8)
tensorImage.load(bitmap)
tensorImage = imageProcessor.process(tensorImage)
// 2. Prepare Output Buffers
val outputBoxes = Array(1) { Array(25) { FloatArray(4) } }
val outputClasses = Array(1) { FloatArray(25) }
val outputScores = Array(1) { FloatArray(25) }
val outputCount = FloatArray(1)
val outputs = mapOf(
0 to outputBoxes,
1 to outputClasses,
2 to outputScores,
3 to outputCount
)
// 3. Run Inference
try {
tflite.runForMultipleInputsOutputs(arrayOf(tensorImage.buffer), outputs)
} catch (e: Exception) {
onError(e.message ?: "Inference failed")
return
}
val inferenceTime = System.currentTimeMillis() - startTime
// 4. Parse Results
val results = mutableListOf<ObjectDetector.DetectionResult>()
// Calculate dimensions of the rotated image (the coordinate space of the detections)
val rotatedWidth = if (imageRotation % 180 == 0) bitmap.width else bitmap.height
val rotatedHeight = if (imageRotation % 180 == 0) bitmap.height else bitmap.width
for (i in 0 until 25) {
val score = outputScores[0][i]
if (score < 0.4f) continue
val classId = outputClasses[0][i].toInt()
val label = if (classId == 20) "Cow" else "Object $classId"
// Get box: [ymin, xmin, ymax, xmax] (normalized 0..1)
val box = outputBoxes[0][i]
val ymin = box[0]
val xmin = box[1]
val ymax = box[2]
val xmax = box[3]
// Scale to rotated image dimensions
val left = xmin * rotatedWidth
val top = ymin * rotatedHeight
val right = xmax * rotatedWidth
val bottom = ymax * rotatedHeight
val boundingBox = RectF(left, top, right, bottom)
results.add(ObjectDetector.DetectionResult(boundingBox, label, score))
}
onResults(results, inferenceTime)
}
}
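A minimal sketch of wiring the detector into a caller; the logging and the way the best result is picked are illustrative, not part of this changeset (android.util.Log assumed imported):
// Hypothetical wiring; `context` comes from whatever component owns the detector.
val detector = ObjectDetectorImpl(
    context = context,
    onResults = { results, inferenceTimeMs ->
        // Pick the highest-scoring detection, if any.
        val best = results.maxByOrNull { it.score }
        Log.d("Detector", "best=${best?.text} score=${best?.score} in ${inferenceTimeMs}ms")
    },
    onError = { message -> Log.e("Detector", message) }
)
// Later, for each camera frame:
// detector.detect(bitmap, imageRotationDegrees)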

View File

@ -0,0 +1,182 @@
package com.example.livingai.domain.ml
import android.graphics.Bitmap
import android.graphics.Rect
import kotlin.math.max
import kotlin.math.min
class OrientationPixelEstimator(
private val iouThreshold: Float = 0.60f
) {
/**
* Main function:
* - segmentationMaskBitmap: ML Kit's alpha mask (animal foreground)
* - silhouetteBitmap: template mask for EXPECTED orientation (e.g., LEFT)
* - bbox: detected bounding box from segmentation
*/
fun analyze(
segmentationMaskBitmap: Bitmap,
silhouetteBitmap: Bitmap,
bbox: Rect,
frameWidth: Int,
frameHeight: Int,
medianDepthMeters: Float? = null
): OrientationPixelResult {
// 1) Convert both masks → boolean
val segFullMask = bitmapToBooleanMask(segmentationMaskBitmap)
val silhouetteMask = bitmapToBooleanMask(silhouetteBitmap)
// 2) Crop segmentation mask to bbox
val croppedMask = cropMaskToBBox(segFullMask, frameWidth, frameHeight, bbox)
// 3) Scale silhouette mask to bbox size
val scaledSilhouette = scaleMask(
silhouetteMask,
silhouetteBitmap.width,
silhouetteBitmap.height,
bbox.width(),
bbox.height()
)
// 4) Compute IoU
val iou = computeIoU(croppedMask, scaledSilhouette)
val orientationMatched = iou >= iouThreshold
// 5) Pixel metrics extraction
val metrics = computePixelMetrics(croppedMask, bbox, medianDepthMeters)
return OrientationPixelResult(
orientationMatched = orientationMatched,
matchedOrientation = null,
iouScore = iou,
iouBestOther = 0f,
pixelMetrics = metrics
)
}
// -----------------------------
// MASK HELPERS
// -----------------------------
private fun bitmapToBooleanMask(bitmap: Bitmap): BooleanArray {
val w = bitmap.width
val h = bitmap.height
val pixels = IntArray(w * h)
bitmap.getPixels(pixels, 0, w, 0, 0, w, h)
val out = BooleanArray(w * h)
for (i in pixels.indices) {
val alpha = (pixels[i] ushr 24) and 0xFF
out[i] = alpha > 0
}
return out
}
private fun cropMaskToBBox(
fullMask: BooleanArray,
frameW: Int,
frameH: Int,
bbox: Rect
): BooleanArray {
val left = max(0, bbox.left)
val top = max(0, bbox.top)
val right = min(frameW - 1, bbox.right)
val bottom = min(frameH - 1, bbox.bottom)
val width = right - left + 1
val height = bottom - top + 1
val out = BooleanArray(width * height)
var idx = 0
for (y in top..bottom) {
for (x in left..right) {
out[idx++] = fullMask[y * frameW + x]
}
}
return out
}
private fun scaleMask(
src: BooleanArray,
srcW: Int,
srcH: Int,
dstW: Int,
dstH: Int
): BooleanArray {
val out = BooleanArray(dstW * dstH)
for (y in 0 until dstH) {
val sy = ((y.toFloat() / dstH) * srcH).toInt().coerceIn(0, srcH - 1)
for (x in 0 until dstW) {
val sx = ((x.toFloat() / dstW) * srcW).toInt().coerceIn(0, srcW - 1)
out[y * dstW + x] = src[sy * srcW + sx]
}
}
return out
}
private fun computeIoU(a: BooleanArray, b: BooleanArray): Float {
if (a.size != b.size) return 0f
var inter = 0
var union = 0
for (i in a.indices) {
val ai = a[i]
val bi = b[i]
if (ai || bi) union++
if (ai && bi) inter++
}
return if (union == 0) 0f else inter.toFloat() / union
}
// -----------------------------
// PIXEL METRICS
// -----------------------------
private fun computePixelMetrics(
croppedMask: BooleanArray,
bbox: Rect,
medianDepthMeters: Float?
): PixelMetrics {
val w = bbox.width()
val h = bbox.height()
var count = 0
var sumX = 0L
var sumY = 0L
for (y in 0 until h) {
for (x in 0 until w) {
if (croppedMask[y * w + x]) {
count++
sumX += x
sumY += y
}
}
}
val centroidX = bbox.left + (sumX.toFloat() / max(1, count))
val centroidY = bbox.top + (sumY.toFloat() / max(1, count))
return PixelMetrics(
widthPx = w,
heightPx = h,
areaPx = count,
centroidX = centroidX,
centroidY = centroidY,
distanceProxyInvHeight = if (h > 0) 1f / h.toFloat() else Float.POSITIVE_INFINITY,
heightPxFloat = h.toFloat(),
medianDepthMeters = medianDepthMeters
)
}
}
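A sketch of invoking the estimator with an ML Kit mask and a bundled silhouette template; the bitmaps and bounding box below are placeholders coming from earlier pipeline steps:
// Hypothetical inputs: maskBitmap from segmentation, silhouetteBitmap from app resources.
val estimator = OrientationPixelEstimator(iouThreshold = 0.60f)
val result = estimator.analyze(
    segmentationMaskBitmap = maskBitmap,
    silhouetteBitmap = silhouetteBitmap,
    bbox = detectedBox,              // Rect from the segmentation step
    frameWidth = maskBitmap.width,
    frameHeight = maskBitmap.height,
    medianDepthMeters = null
)
if (result.orientationMatched) {
    // IoU against the expected-orientation template cleared the threshold.
    val subjectHeightPx = result.pixelMetrics?.heightPx
}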

View File

@ -0,0 +1,24 @@
package com.example.livingai.domain.ml
data class OrientationPixelResult(
val orientationMatched: Boolean, // true only if requested orientation is confidently matched
val matchedOrientation: Orientation?,// which orientation matched (if any)
val iouScore: Float, // IoU score for matched orientation (0..1)
val iouBestOther: Float, // best IoU among other orientations
val pixelMetrics: PixelMetrics? // null if orientation not matched
)
enum class Orientation {
LEFT, RIGHT, FRONT, BACK, LEFT_45, RIGHT_45, TOP, BOTTOM
}
data class PixelMetrics(
val widthPx: Int,
val heightPx: Int,
val areaPx: Int,
val centroidX: Float,
val centroidY: Float,
val distanceProxyInvHeight: Float, // 1 / heightPx (relative distance proxy)
val heightPxFloat: Float, // convenience
val medianDepthMeters: Float? // if depth map available (null otherwise)
)

View File

@ -0,0 +1,11 @@
package com.example.livingai.domain.ml
data class OrientationState(
val success: Boolean,
val reason: String,
val pixelMetrics: PixelMetrics?,
val orientationMatched: Boolean,
val iouScore: Float? = null,
val relativeDepth: Float? = null,
val absoluteDistanceMeters: Float? = null
)

View File

@ -0,0 +1,8 @@
package com.example.livingai.domain.ml
data class OrientationTemplate(
val orientation: Orientation,
val mask: BooleanArray,
val templateWidth: Int,
val templateHeight: Int
)

View File

@ -0,0 +1,136 @@
package com.example.livingai.domain.ml
import android.content.ContentValues
import android.content.Context
import android.graphics.Bitmap
import android.graphics.Canvas
import android.graphics.Color
import android.net.Uri
import android.os.Build
import android.provider.MediaStore
import com.google.mlkit.vision.common.InputImage
import com.google.mlkit.vision.segmentation.subject.SubjectSegmentation
import com.google.mlkit.vision.segmentation.subject.SubjectSegmenterOptions
import kotlinx.coroutines.suspendCancellableCoroutine
import java.io.OutputStream
import kotlin.coroutines.resume
import kotlin.coroutines.resumeWithException
class SubjectSegmenterHelper(private val context: Context) {
private suspend fun segmentInternal(image: InputImage): Bitmap? =
suspendCancellableCoroutine { continuation ->
val options = SubjectSegmenterOptions.Builder()
.enableMultipleSubjects(
SubjectSegmenterOptions.SubjectResultOptions.Builder()
.enableSubjectBitmap()
.build()
)
.build()
val segmenter = SubjectSegmentation.getClient(options)
segmenter.process(image)
.addOnSuccessListener { result ->
val subject = result.subjects
.maxByOrNull { it.width * it.height }
if (subject?.bitmap == null) {
continuation.resume(null)
return@addOnSuccessListener
}
try {
val output = Bitmap.createBitmap(
image.width,
image.height,
Bitmap.Config.ARGB_8888
)
val canvas = Canvas(output)
canvas.drawColor(Color.BLACK)
canvas.drawBitmap(
subject.bitmap!!,
subject.startX.toFloat(),
subject.startY.toFloat(),
null
)
continuation.resume(output)
} catch (e: Exception) {
continuation.resumeWithException(e)
}
}
.addOnFailureListener { e ->
continuation.resumeWithException(e)
}
.addOnCompleteListener {
segmenter.close()
}
}
suspend fun segmentToBitmap(inputBitmap: Bitmap): Bitmap? {
val image = InputImage.fromBitmap(inputBitmap, 0)
return segmentInternal(image)
}
suspend fun segmentAndSave(
inputBitmap: Bitmap,
animalId: String,
orientation: String,
subFolder: String? = null
): Uri? {
val image = InputImage.fromBitmap(inputBitmap, 0)
val bitmap = segmentInternal(image) ?: return null
return saveBitmap(bitmap, animalId, orientation, subFolder)
}
suspend fun segmentAndSave(
inputUri: Uri,
animalId: String,
orientation: String,
subFolder: String? = null
): Uri? {
val image = InputImage.fromFilePath(context, inputUri)
val bitmap = segmentInternal(image) ?: return null
return saveBitmap(bitmap, animalId, orientation, subFolder)
}
private fun saveBitmap(
bitmap: Bitmap,
animalId: String,
orientation: String,
subFolder: String?
): Uri? {
val filename = "${animalId}_${orientation}_segmented.jpg"
val values = ContentValues().apply {
put(MediaStore.MediaColumns.DISPLAY_NAME, filename)
put(MediaStore.MediaColumns.MIME_TYPE, "image/jpeg")
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
val path =
if (subFolder != null)
"Pictures/LivingAI/$animalId/$subFolder"
else
"Pictures/LivingAI/$animalId"
put(MediaStore.MediaColumns.RELATIVE_PATH, path)
}
}
val uri = context.contentResolver.insert(
MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
values
) ?: return null
val outputStream: OutputStream? =
context.contentResolver.openOutputStream(uri)
outputStream?.use { out ->
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, out)
}
return uri
}
}
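A sketch of calling the helper from a coroutine; the captured bitmap, animal ID and orientation string are placeholders:
// Hypothetical call site inside a coroutine (e.g. viewModelScope.launch { ... }).
val segmenter = SubjectSegmenterHelper(context)
val savedUri = segmenter.segmentAndSave(
    inputBitmap = capturedBitmap,   // placeholder capture result
    animalId = "A123",              // placeholder ID
    orientation = "left",
    subFolder = "segmented"
)
// savedUri is null when no subject was found; otherwise it points at the saved JPEG.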

View File

@ -0,0 +1,18 @@
package com.example.livingai.domain.model
data class AnimalDetails(
val animalId: String,
val name: String,
val species: String,
val breed: String,
val sex: String,
val weight: Int,
val age: Int,
val milkYield: Int,
val calvingNumber: Int,
val reproductiveStatus: String,
val description: String,
val images: Map<String, String>,
val video: String,
val segmentedImages: Map<String, String>
)

View File

@ -0,0 +1,13 @@
package com.example.livingai.domain.model
data class AnimalProfile(
val animalId: String,
val name: String,
val species: String,
val breed: String,
val sex: String,
val weight: Int,
val age: Int,
val overallRating: Int? = null,
val imageUrls: List<String>
)

View File

@ -0,0 +1,32 @@
package com.example.livingai.domain.model
data class AnimalRating(
val animalId: String,
val overallRating: Int,
val healthRating: Int,
val breedRating: Int,
val stature: Int,
val chestWidth: Int,
val bodyDepth: Int,
val angularity: Int,
val rumpAngle: Int,
val rumpWidth: Int,
val rearLegSet: Int,
val rearLegRearView: Int,
val footAngle: Int,
val foreUdderAttachment: Int,
val rearUdderHeight: Int,
val centralLigament: Int,
val udderDepth: Int,
val frontTeatPosition: Int,
val teatLength: Int,
val rearTeatPosition: Int,
val locomotion: Int,
val bodyConditionScore: Int,
val hockDevelopment: Int,
val boneStructure: Int,
val rearUdderWidth: Int,
val teatThickness: Int,
val muscularity: Int,
val bodyConditionComments: String,
)

View File

@ -0,0 +1,11 @@
package com.example.livingai.domain.model
import kotlinx.serialization.Serializable
@Serializable
data class SettingsData(
val language: String = "en",
val isAutoCaptureOn: Boolean = false,
val jaccardThreshold: Float = 50f,
val distanceMethod: String = "Jaccard"
)

View File

@ -0,0 +1,80 @@
package com.example.livingai.domain.model.camera
import android.graphics.Bitmap
import android.graphics.RectF
/**
* Represents the output of a pipeline analysis step.
* @param message Instruction text to be displayed to the user.
* @param animationResId Resource ID for a visual GIF/Animation explaining the instruction.
* @param isValid True if the step passed validation, False otherwise.
* @param result The detailed analysis result (optional).
*/
data class Instruction(
val message: String,
val isValid: Boolean,
val animationResId: Int? = null,
val result: AnalysisResult? = null
)
/**
* Sealed interface for different types of analysis results.
*/
sealed interface AnalysisResult
data class OrientationResult(
val currentOrientation: Int,
val requiredOrientation: CameraOrientation
) : AnalysisResult
data class TiltResult(
val roll: Float,
val pitch: Float,
val isLevel: Boolean
) : AnalysisResult
data class DetectionResult(
val isAnimalDetected: Boolean,
val animalBounds: RectF?,
val referenceObjects: List<ReferenceObject>,
val label: String? = null,
val confidence: Float = 0f,
val segmentationMask: ByteArray? = null
) : AnalysisResult
data class PoseResult(
val isCorrectPose: Boolean,
val feedback: String
) : AnalysisResult
/**
* Data class representing a reference object detected in the scene.
*/
data class ReferenceObject(
val id: String,
val label: String,
val bounds: RectF,
val relativeHeight: Float,
val relativeWidth: Float,
val distance: Float? = null
)
enum class CameraOrientation {
PORTRAIT, LANDSCAPE
}
/**
* Data to be saved after a successful capture.
*/
data class CaptureData(
val image: Bitmap,
val segmentationMask: BooleanArray, // Flattened 2D array or similar representation
val animalMetrics: ObjectMetrics,
val referenceObjects: List<ReferenceObject>
)
data class ObjectMetrics(
val relativeHeight: Float,
val relativeWidth: Float,
val distance: Float
)
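As a small example of how a pipeline step might package its output, here is a hypothetical Instruction built from a detection result; the message text and bounds are placeholders:
// Hypothetical pipeline output for a frame where the animal was found but off-centre.
val detection = DetectionResult(
    isAnimalDetected = true,
    animalBounds = RectF(120f, 300f, 860f, 1500f),  // placeholder bounds
    referenceObjects = emptyList(),
    label = "cow",
    confidence = 0.82f
)
val instruction = Instruction(
    message = "Move the animal to the centre of the frame",
    isValid = false,
    result = detection
)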

View File

@ -0,0 +1,11 @@
package com.example.livingai.domain.repository
import com.example.livingai.domain.model.SettingsData
import kotlinx.coroutines.flow.Flow
interface AppDataRepository {
fun getSettings(): Flow<SettingsData>
suspend fun saveSettings(settings: SettingsData)
suspend fun saveAppEntry()
fun readAppEntry(): Flow<Boolean>
}

View File

@ -0,0 +1,20 @@
package com.example.livingai.domain.repository
import android.graphics.Bitmap
import android.graphics.Rect
import androidx.camera.core.ImageProxy
import com.example.livingai.domain.ml.Orientation
import com.example.livingai.domain.ml.OrientationState
interface CameraRepository {
suspend fun captureImage(imageProxy: ImageProxy): Bitmap
suspend fun processFrame(
bitmap: Bitmap,
requestedOrientation: Orientation,
silhouetteBitmap: Bitmap,
realObjectHeightMeters: Float?,
focalLengthPixels: Float,
boundingBox: Rect? = null
): OrientationState
suspend fun saveImage(bitmap: Bitmap, animalId: String, orientation: String?): String
}

View File

@ -0,0 +1,9 @@
package com.example.livingai.domain.repository
import com.example.livingai.data.local.model.SettingsData
import kotlinx.coroutines.flow.Flow
interface SettingsRepository {
fun getSettings(): Flow<SettingsData>
suspend fun saveSettings(settings: SettingsData)
}

View File

@ -0,0 +1,10 @@
package com.example.livingai.domain.repository
import android.graphics.Bitmap
interface VideoRepository {
fun startRecording(onRecordingStarted: () -> Unit)
fun stopRecording()
suspend fun processFrame(bitmap: Bitmap): String
}

View File

@ -0,0 +1,12 @@
package com.example.livingai.domain.repository.business
import com.example.livingai.domain.model.AnimalDetails
import kotlinx.coroutines.flow.Flow
interface AnimalDetailsRepository {
fun getAnimalDetails(id: String): Flow<AnimalDetails?>
suspend fun saveAnimalDetails(animalDetails: AnimalDetails)
suspend fun deleteAnimalDetails(id: String)
}

View File

@ -0,0 +1,13 @@
package com.example.livingai.domain.repository.business
import androidx.paging.PagingData
import com.example.livingai.domain.model.AnimalProfile
import kotlinx.coroutines.flow.Flow
interface AnimalProfileRepository {
fun getAnimalProfiles(): Flow<PagingData<AnimalProfile>>
suspend fun saveAnimalProfile(animalProfile: AnimalProfile)
suspend fun deleteAnimalProfile(id: String)
}

View File

@ -0,0 +1,12 @@
package com.example.livingai.domain.repository.business
import com.example.livingai.domain.model.AnimalRating
import kotlinx.coroutines.flow.Flow
interface AnimalRatingRepository {
fun getAnimalRating(id: String): Flow<AnimalRating?>
suspend fun saveAnimalRating(animalRating: AnimalRating)
suspend fun deleteAnimalRating(id: String)
}

View File

@ -0,0 +1,18 @@
package com.example.livingai.domain.repository.business
import androidx.paging.PagingData
import com.example.livingai.domain.model.AnimalDetails
import com.example.livingai.domain.model.AnimalProfile
import com.example.livingai.domain.model.AnimalRating
import kotlinx.coroutines.flow.Flow
interface DataSource {
fun getAnimalProfiles(): Flow<PagingData<AnimalProfile>>
fun getAnimalDetails(animalId: String): Flow<AnimalDetails?>
fun getAnimalRatings(animalId: String): Flow<AnimalRating?>
suspend fun setAnimalProfile(animalProfile: AnimalProfile)
suspend fun setAnimalDetails(animalDetails: AnimalDetails)
suspend fun setAnimalRatings(animalRating: AnimalRating)
suspend fun deleteAnimalProfile(animalId: String)
}

View File

@ -0,0 +1,8 @@
package com.example.livingai.domain.usecases
data class AppDataUseCases(
val getSettings: GetSettingsUseCase,
val saveSettings: SaveSettingsUseCase,
val readAppEntry: ReadAppEntryUseCase,
val saveAppEntry: SaveAppEntryUseCase
)

View File

@ -0,0 +1,9 @@
package com.example.livingai.domain.usecases.AppEntry
import com.example.livingai.domain.usecases.ReadAppEntry
import com.example.livingai.domain.usecases.SaveAppEntry
data class AppEntryUseCases(
val readAppEntry: ReadAppEntry,
val saveAppEntry: SaveAppEntry
)

View File

@ -0,0 +1,11 @@
package com.example.livingai.domain.usecases
import com.example.livingai.domain.repository.business.AnimalProfileRepository
class DeleteAnimalProfile(
private val animalProfileRepository: AnimalProfileRepository
) {
suspend operator fun invoke(animalId: String) {
animalProfileRepository.deleteAnimalProfile(animalId)
}
}

View File

@ -0,0 +1,13 @@
package com.example.livingai.domain.usecases
import com.example.livingai.domain.model.AnimalDetails
import com.example.livingai.domain.repository.business.AnimalDetailsRepository
import kotlinx.coroutines.flow.Flow
class GetAnimalDetails(
private val animalDetailsRepository: AnimalDetailsRepository
) {
operator fun invoke(animalId: String): Flow<AnimalDetails?> {
return animalDetailsRepository.getAnimalDetails(animalId)
}
}

View File

@ -0,0 +1,14 @@
package com.example.livingai.domain.usecases
import androidx.paging.PagingData
import com.example.livingai.domain.model.AnimalProfile
import com.example.livingai.domain.repository.business.AnimalProfileRepository
import kotlinx.coroutines.flow.Flow
class GetAnimalProfiles(
private val animalProfileRepository: AnimalProfileRepository
) {
operator fun invoke(): Flow<PagingData<AnimalProfile>> {
return animalProfileRepository.getAnimalProfiles()
}
}

View File

@ -0,0 +1,13 @@
package com.example.livingai.domain.usecases
import com.example.livingai.domain.model.AnimalRating
import com.example.livingai.domain.repository.business.AnimalRatingRepository
import kotlinx.coroutines.flow.Flow
class GetAnimalRatings(
private val animalRatingRepository: AnimalRatingRepository
) {
operator fun invoke(animalId: String): Flow<AnimalRating?> {
return animalRatingRepository.getAnimalRating(animalId)
}
}

View File

@ -0,0 +1,10 @@
package com.example.livingai.domain.usecases
import com.example.livingai.domain.repository.AppDataRepository
import javax.inject.Inject
class GetSettingsUseCase @Inject constructor(
private val appDataRepository: AppDataRepository
) {
operator fun invoke() = appDataRepository.getSettings()
}

View File

@ -0,0 +1,9 @@
package com.example.livingai.domain.usecases.ProfileEntry
import com.example.livingai.domain.usecases.GetAnimalDetails
import com.example.livingai.domain.usecases.SetAnimalDetails
data class ProfileEntryUseCase(
val getAnimalDetails: GetAnimalDetails,
val setAnimalDetails: SetAnimalDetails,
)

View File

@ -0,0 +1,9 @@
package com.example.livingai.domain.usecases.ProfileListing
import com.example.livingai.domain.usecases.DeleteAnimalProfile
import com.example.livingai.domain.usecases.GetAnimalProfiles
data class ProfileListingUseCase(
val getAnimalProfiles: GetAnimalProfiles,
val deleteAnimalProfile: DeleteAnimalProfile
)

View File

@ -0,0 +1,9 @@
package com.example.livingai.domain.usecases.ProfilesEntry
import com.example.livingai.domain.usecases.DeleteAnimalProfile
import com.example.livingai.domain.usecases.GetAnimalProfiles
data class ProfilesEntryUseCases(
val getAnimalProfiles: GetAnimalProfiles,
val deleteAnimalProfile: DeleteAnimalProfile
)

View File

@ -0,0 +1,12 @@
package com.example.livingai.domain.usecases
import com.example.livingai.domain.manager.LocalUserManager
import kotlinx.coroutines.flow.Flow
class ReadAppEntry(
private val localUserManager: LocalUserManager
) {
operator fun invoke(): Flow<Boolean> {
return localUserManager.readAppEntry()
}
}

View File

@ -0,0 +1,11 @@
package com.example.livingai.domain.usecases
import com.example.livingai.domain.repository.AppDataRepository
import kotlinx.coroutines.flow.Flow
import javax.inject.Inject
class ReadAppEntryUseCase @Inject constructor(
private val appDataRepository: AppDataRepository
) {
operator fun invoke(): Flow<Boolean> = appDataRepository.readAppEntry()
}

View File

@ -0,0 +1,11 @@
package com.example.livingai.domain.usecases
import com.example.livingai.domain.manager.LocalUserManager
class SaveAppEntry(
private val localUserManager: LocalUserManager
) {
suspend operator fun invoke() {
localUserManager.saveAppEntry()
}
}

View File

@ -0,0 +1,10 @@
package com.example.livingai.domain.usecases
import com.example.livingai.domain.repository.AppDataRepository
import javax.inject.Inject
class SaveAppEntryUseCase @Inject constructor(
private val appDataRepository: AppDataRepository
) {
suspend operator fun invoke() = appDataRepository.saveAppEntry()
}

View File

@ -0,0 +1,11 @@
package com.example.livingai.domain.usecases
import com.example.livingai.domain.model.SettingsData
import com.example.livingai.domain.repository.AppDataRepository
import javax.inject.Inject
class SaveSettingsUseCase @Inject constructor(
private val appDataRepository: AppDataRepository
) {
suspend operator fun invoke(settings: SettingsData) = appDataRepository.saveSettings(settings)
}

View File

@ -0,0 +1,12 @@
package com.example.livingai.domain.usecases
import com.example.livingai.domain.model.AnimalDetails
import com.example.livingai.domain.repository.business.AnimalDetailsRepository
class SetAnimalDetails(
private val animalDetailsRepository: AnimalDetailsRepository
) {
suspend operator fun invoke(animalDetails: AnimalDetails) {
animalDetailsRepository.saveAnimalDetails(animalDetails)
}
}

View File

@ -0,0 +1,12 @@
package com.example.livingai.domain.usecases
import com.example.livingai.domain.model.AnimalRating
import com.example.livingai.domain.repository.business.AnimalRatingRepository
class SetAnimalRatings(
private val animalRatingRepository: AnimalRatingRepository
) {
suspend operator fun invoke(animalRating: AnimalRating) {
animalRatingRepository.saveAnimalRating(animalRating)
}
}

View File

@ -0,0 +1,272 @@
package com.example.livingai.pages.addprofile
import android.annotation.SuppressLint
import androidx.compose.foundation.layout.Arrangement
import androidx.compose.foundation.layout.Column
import androidx.compose.foundation.layout.Row
import androidx.compose.foundation.layout.Spacer
import androidx.compose.foundation.layout.fillMaxWidth
import androidx.compose.foundation.layout.height
import androidx.compose.foundation.layout.padding
import androidx.compose.foundation.lazy.grid.GridCells
import androidx.compose.foundation.lazy.grid.GridItemSpan
import androidx.compose.foundation.lazy.grid.LazyVerticalGrid
import androidx.compose.foundation.lazy.grid.items
import androidx.compose.material3.Button
import androidx.compose.material3.ExperimentalMaterial3Api
import androidx.compose.material3.MaterialTheme
import androidx.compose.material3.OutlinedButton
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
import androidx.compose.runtime.LaunchedEffect
import androidx.compose.runtime.getValue
import androidx.compose.runtime.remember
import androidx.compose.runtime.setValue
import androidx.compose.ui.Modifier
import androidx.compose.ui.focus.FocusRequester
import androidx.compose.ui.focus.focusRequester
import androidx.compose.ui.platform.LocalContext
import androidx.compose.ui.res.stringArrayResource
import androidx.compose.ui.res.stringResource
import androidx.compose.ui.text.font.FontWeight
import androidx.compose.ui.text.input.KeyboardType
import androidx.navigation.NavController
import com.example.livingai.R
import com.example.livingai.pages.commons.Dimentions
import com.example.livingai.pages.components.CommonScaffold
import com.example.livingai.pages.components.ImageThumbnailButton
import com.example.livingai.pages.components.LabeledDropdown
import com.example.livingai.pages.components.LabeledTextField
import com.example.livingai.pages.components.RadioGroup
import com.example.livingai.pages.components.VideoThumbnailButton
import com.example.livingai.utils.Constants
@OptIn(ExperimentalMaterial3Api::class)
@SuppressLint("UnusedMaterial3ScaffoldPaddingParameter")
@Composable
fun AddProfileScreen(
navController: NavController,
viewModel: AddProfileViewModel,
onSave: () -> Unit,
onCancel: () -> Unit,
onTakePhoto: (String) -> Unit,
onTakeVideo: () -> Unit
) {
val context = LocalContext.current
val speciesList = stringArrayResource(id = R.array.species_list).toList()
val breedList = stringArrayResource(id = R.array.cow_breed_list).toList()
val reproList = listOf(
stringResource(R.string.option_pregnant),
stringResource(R.string.option_calved),
stringResource(R.string.option_none)
)
val silhouette = Constants.silhouetteList.associateWith { item ->
val resId = context.resources.getIdentifier("label_${item}", "string", context.packageName)
if (resId != 0) resId else R.string.default_orientation_label
}
// Use ViewModel state
var species by viewModel.species
var breed by viewModel.breed
var age by viewModel.age
var milkYield by viewModel.milkYield
var calvingNumber by viewModel.calvingNumber
var reproductiveStatus by viewModel.reproductiveStatus
var description by viewModel.description
// Errors
val speciesError by viewModel.speciesError
val breedError by viewModel.breedError
val ageError by viewModel.ageError
val milkYieldError by viewModel.milkYieldError
val calvingNumberError by viewModel.calvingNumberError
val reproductiveStatusError by viewModel.reproductiveStatusError
val photos = viewModel.photos
val videoUri by viewModel.videoUri
// Focus Requesters
val speciesFocus = remember { FocusRequester() }
val breedFocus = remember { FocusRequester() }
val ageFocus = remember { FocusRequester() }
val milkYieldFocus = remember { FocusRequester() }
val calvingNumberFocus = remember { FocusRequester() }
// Auto-focus logic on error
LaunchedEffect(speciesError, breedError, ageError, milkYieldError, calvingNumberError, reproductiveStatusError) {
if (speciesError != null) {
speciesFocus.requestFocus()
} else if (breedError != null) {
breedFocus.requestFocus()
} else if (ageError != null) {
ageFocus.requestFocus()
} else if (milkYieldError != null) {
milkYieldFocus.requestFocus()
} else if (calvingNumberError != null) {
calvingNumberFocus.requestFocus()
}
}
CommonScaffold(
navController = navController,
title = stringResource(id = R.string.top_bar_add_profile)
) { innerPadding ->
LazyVerticalGrid(
columns = GridCells.Fixed(2),
verticalArrangement = Arrangement.spacedBy(Dimentions.SMALL_PADDING_INPUT),
horizontalArrangement = Arrangement.spacedBy(Dimentions.SMALL_PADDING_IMAGE),
modifier = Modifier
.padding(innerPadding)
.padding(Dimentions.SMALL_PADDING_TEXT)
) {
item(span = { GridItemSpan(2) }) {
Column(
verticalArrangement = Arrangement.spacedBy(Dimentions.SMALL_PADDING_INPUT)
) {
LabeledDropdown(
labelRes = R.string.label_species,
options = speciesList,
selected = species,
onSelected = viewModel::validateSpeciesInputs,
modifier = Modifier.focusRequester(speciesFocus),
isError = speciesError != null,
supportingText = speciesError
)
LabeledDropdown(
labelRes = R.string.label_breed,
options = breedList,
selected = breed,
onSelected = viewModel::validateBreedInputs,
modifier = Modifier.focusRequester(breedFocus),
isError = breedError != null,
supportingText = breedError
)
Row(
horizontalArrangement = Arrangement.spacedBy(Dimentions.SMALL_PADDING_INPUT),
modifier = Modifier.fillMaxWidth(),
) {
LabeledTextField(
labelRes = R.string.label_age,
value = age,
modifier = Modifier
.weight(1f)
.focusRequester(ageFocus),
onValueChange = viewModel::validateAgeInputs,
keyboardType = KeyboardType.Number,
isError = ageError != null,
supportingText = ageError
)
LabeledTextField(
labelRes = R.string.label_milk_yield,
value = milkYield,
modifier = Modifier
.weight(1f)
.focusRequester(milkYieldFocus),
onValueChange = viewModel::validateMilkYieldInputs,
keyboardType = KeyboardType.Number,
isError = milkYieldError != null,
supportingText = milkYieldError
)
}
LabeledTextField(
labelRes = R.string.label_calving_number,
value = calvingNumber,
modifier = Modifier
.fillMaxWidth()
.focusRequester(calvingNumberFocus),
onValueChange = viewModel::validateCalvingInputs,
keyboardType = KeyboardType.Number,
isError = calvingNumberError != null,
supportingText = calvingNumberError
)
RadioGroup(
titleRes = R.string.label_reproductive_status,
options = reproList,
selected = reproductiveStatus,
onSelected = viewModel::validateReproductiveStatusInputs,
isError = reproductiveStatusError != null
)
if (reproductiveStatusError != null) {
Text(
text = reproductiveStatusError ?: "",
color = MaterialTheme.colorScheme.error,
style = MaterialTheme.typography.bodySmall,
modifier = Modifier.padding(start = Dimentions.SMALL_PADDING_TEXT)
)
}
LabeledTextField(
labelRes = R.string.label_description,
value = description,
modifier = Modifier
.fillMaxWidth(),
onValueChange = { description = it },
keyboardType = KeyboardType.Text
)
Text(
text = stringResource(id = R.string.label_upload_media),
style = MaterialTheme.typography.labelLarge.copy(fontWeight = FontWeight.SemiBold),
color = MaterialTheme.colorScheme.onSurface,
modifier = Modifier.padding(vertical = Dimentions.SMALL_PADDING_TEXT)
)
}
}
items(silhouette.entries.toList()) { (key, value) ->
ImageThumbnailButton(
image = photos[key],
onClick = { onTakePhoto(key) },
labelRes = value
)
}
// Video Button
item {
VideoThumbnailButton(
videoSource = videoUri,
onClick = onTakeVideo
)
}
// Save and Cancel Buttons
item(span = { GridItemSpan(2) }) {
Row(
modifier = Modifier
.fillMaxWidth()
.padding(top = Dimentions.MEDIUM_PADDING_BUTTON),
horizontalArrangement = Arrangement.Center
) {
OutlinedButton(
onClick = onCancel,
modifier = Modifier
.weight(1f)
.padding(end = Dimentions.SMALL_PADDING_BUTTON)
) {
Text(text = stringResource(id = R.string.btn_cancel))
}
Button(
onClick = onSave,
modifier = Modifier
.weight(1f)
.padding(start = Dimentions.SMALL_PADDING_BUTTON)
) {
Text(text = stringResource(id = R.string.btn_save_profile))
}
}
}
item(span = { GridItemSpan(2) }) {
Spacer(modifier = Modifier.height(Dimentions.LARGE_PADDING))
}
}
}
}

View File

@ -0,0 +1,271 @@
package com.example.livingai.pages.addprofile
import androidx.compose.runtime.State
import androidx.compose.runtime.mutableStateMapOf
import androidx.compose.runtime.mutableStateOf
import androidx.lifecycle.SavedStateHandle
import androidx.lifecycle.ViewModel
import androidx.lifecycle.viewModelScope
import com.example.livingai.domain.model.AnimalDetails
import com.example.livingai.domain.usecases.ProfileEntry.ProfileEntryUseCase
import com.example.livingai.utils.CoroutineDispatchers
import com.example.livingai.utils.IdGenerator
import kotlinx.coroutines.flow.launchIn
import kotlinx.coroutines.flow.onEach
import kotlinx.coroutines.launch
import kotlinx.coroutines.withContext
class AddProfileViewModel(
private val profileEntryUseCase: ProfileEntryUseCase,
private val dispatchers: CoroutineDispatchers,
private val savedStateHandle: SavedStateHandle? = null
) : ViewModel() {
companion object {
private const val KEY_ANIMAL_ID = "animal_id"
}
private val _animalDetails = mutableStateOf<AnimalDetails?>(null)
private val _currentAnimalId =
mutableStateOf(savedStateHandle?.get<String>(KEY_ANIMAL_ID))
val currentAnimalId: State<String?> = _currentAnimalId
var species = mutableStateOf<String?>(null)
var breed = mutableStateOf<String?>(null)
var age = mutableStateOf("")
var milkYield = mutableStateOf("")
var calvingNumber = mutableStateOf("")
var reproductiveStatus = mutableStateOf<String?>(null)
var description = mutableStateOf("")
var ageError = mutableStateOf<String?>(null)
var milkYieldError = mutableStateOf<String?>(null)
var calvingNumberError = mutableStateOf<String?>(null)
var speciesError = mutableStateOf<String?>(null)
var breedError = mutableStateOf<String?>(null)
var reproductiveStatusError = mutableStateOf<String?>(null)
private val _saveSuccess = mutableStateOf(false)
val photos = mutableStateMapOf<String, String>()
val segmentedImages = mutableStateMapOf<String, String>()
private val _videoUri = mutableStateOf<String?>(null)
val videoUri: State<String?> = _videoUri
fun initializeNewProfileIfNeeded() {
if (_currentAnimalId.value != null) return
val id = IdGenerator.generateAnimalId()
_currentAnimalId.value = id
savedStateHandle?.let { it[KEY_ANIMAL_ID] = id }
_animalDetails.value = null
species.value = null
breed.value = null
age.value = ""
milkYield.value = ""
calvingNumber.value = ""
reproductiveStatus.value = null
description.value = ""
clearErrors()
photos.clear()
segmentedImages.clear()
_videoUri.value = null
}
fun loadAnimal(animalId: String) {
if (_currentAnimalId.value == animalId) return
_currentAnimalId.value = animalId
savedStateHandle?.set(KEY_ANIMAL_ID, animalId)
profileEntryUseCase.getAnimalDetails(animalId)
.onEach { details ->
details ?: return@onEach
_animalDetails.value = details
species.value = details.species.ifBlank { null }
breed.value = details.breed.ifBlank { null }
age.value = details.age.takeIf { it > 0 }?.toString() ?: ""
milkYield.value = details.milkYield.takeIf { it > 0 }?.toString() ?: ""
calvingNumber.value = details.calvingNumber.takeIf { it > 0 }?.toString() ?: ""
reproductiveStatus.value = details.reproductiveStatus.ifBlank { null }
description.value = details.description
clearErrors()
photos.clear()
segmentedImages.clear()
withContext(dispatchers.main) {
details.images.forEach { photos[it.key] = it.value }
details.segmentedImages.forEach { segmentedImages[it.key] = it.value }
}
_videoUri.value = details.video.ifBlank { null }
}
.launchIn(viewModelScope)
}
fun saveAnimalDetails(currentId: String): Boolean {
if (!validateInputs()) return false
val id = currentId
val details = AnimalDetails(
animalId = id,
species = species.value ?: "",
breed = breed.value ?: "",
age = age.value.toIntOrNull() ?: 0,
milkYield = milkYield.value.toIntOrNull() ?: 0,
calvingNumber = calvingNumber.value.toIntOrNull() ?: 0,
reproductiveStatus = reproductiveStatus.value ?: "",
description = description.value,
images = photos.toMap(),
video = _videoUri.value ?: "",
segmentedImages = segmentedImages.toMap(),
name = "",
sex = "",
weight = 0
)
viewModelScope.launch {
profileEntryUseCase.setAnimalDetails(details)
_saveSuccess.value = true
}
return true
}
private fun clearErrors() {
ageError.value = null
milkYieldError.value = null
calvingNumberError.value = null
speciesError.value = null
breedError.value = null
reproductiveStatusError.value = null
}
fun addPhoto(orientation: String, uri: String) {
photos[orientation] = uri
}
fun addSegmentedImage(orientation: String, uri: String) {
segmentedImages[orientation] = uri
}
fun setVideo(uri: String) {
_videoUri.value = uri
}
private fun validateInputs(): Boolean {
var isValid = true
if (!validateSpeciesInputs()) isValid = false
if (!validateBreedInputs()) isValid = false
if (!validateAgeInputs()) isValid = false
if (!validateMilkYieldInputs()) isValid = false
if (!validateCalvingInputs()) isValid = false
if (!validateReproductiveStatusInputs()) isValid = false
return isValid
}
fun validateSpeciesInputs(s: String? = null): Boolean {
if (s != null)
species.value = s
var isValid = true
if (species.value.isNullOrBlank()) {
speciesError.value = "Species is required"
isValid = false
} else {
speciesError.value = null
}
return isValid
}
fun validateBreedInputs(b: String? = null): Boolean {
if (b != null)
breed.value = b
var isValid = true
if (breed.value.isNullOrBlank()) {
breedError.value = "Breed is required"
isValid = false
} else {
breedError.value = null
}
return isValid
}
fun validateReproductiveStatusInputs(r: String? = null): Boolean {
if (r != null)
reproductiveStatus.value = r
var isValid = true
if (reproductiveStatus.value.isNullOrBlank()) {
reproductiveStatusError.value = "Status is required"
isValid = false
} else {
reproductiveStatusError.value = null
}
return isValid
}
fun validateAgeInputs(a: String? = null): Boolean {
if (a != null)
age.value = a
var isValid = true
val ageInt = age.value.toIntOrNull()
if (ageInt == null || ageInt <= 0 || ageInt > 20) {
ageError.value = "Invalid age"
isValid = false
} else {
ageError.value = null
}
return isValid
}
fun validateMilkYieldInputs(m: String? = null): Boolean {
if (m != null)
milkYield.value = m
var isValid = true
val milkInt = milkYield.value.toIntOrNull()
if (milkInt == null || milkInt <= 0 || milkInt > 75) {
milkYieldError.value = "Invalid milk yield"
isValid = false
} else {
milkYieldError.value = null
}
return isValid
}
fun validateCalvingInputs(c: String? = null): Boolean {
if (c != null)
calvingNumber.value = c
var isValid = true
val calvingInt = calvingNumber.value.toIntOrNull()
if (calvingInt == null || calvingInt < 0 || calvingInt > 12) {
calvingNumberError.value = "Invalid calving number"
isValid = false
} else {
calvingNumberError.value = null
}
return isValid
}
}
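A sketch of the typical validate-and-save flow driven from the UI layer; the use case and dispatcher instances would come from DI, and the field values here are placeholders:
// Hypothetical save flow, e.g. triggered from the screen's onSave callback.
val viewModel = AddProfileViewModel(
    profileEntryUseCase = profileEntryUseCase,  // provided by DI in practice
    dispatchers = dispatchers                   // provided by DI in practice
)
viewModel.initializeNewProfileIfNeeded()
viewModel.validateSpeciesInputs("Cow")
viewModel.validateBreedInputs("Gir")
viewModel.validateAgeInputs("4")
viewModel.validateMilkYieldInputs("12")
viewModel.validateCalvingInputs("2")
viewModel.validateReproductiveStatusInputs("Calved")
val saved = viewModel.currentAnimalId.value?.let { viewModel.saveAnimalDetails(it) } ?: false
// saved is false while any validation error remains; errors auto-focus the offending field.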

Some files were not shown because too many files have changed in this diff.