From f77eef736c7141806bd068fda01ad0c9a826962d Mon Sep 17 00:00:00 2001
From: SaiD
Date: Sun, 30 Nov 2025 12:33:44 +0530
Subject: [PATCH] Initial commit

---
 .gitattributes | 2 +
 .gitignore | 15 +
 .idea/.gitignore | 3 +
 .idea/AndroidProjectSystem.xml | 6 +
 .idea/compiler.xml | 6 +
 .idea/deploymentTargetSelector.xml | 10 +
 .idea/deviceManager.xml | 13 +
 .idea/gradle.xml | 19 +
 .idea/inspectionProfiles/Project_Default.xml | 61 ++
 .idea/migrations.xml | 10 +
 .idea/misc.xml | 9 +
 .idea/runConfigurations.xml | 17 +
 .idea/studiobot.xml | 6 +
 app/.gitignore | 1 +
 app/build.gradle.kts | 74 +++
 app/proguard-rules.pro | 21 +
 .../livingai/ExampleInstrumentedTest.kt | 24 +
 app/src/main/AndroidManifest.xml | 63 ++
 .../com/example/livingai/analysis/Analyzer.kt | 17 +
 .../livingai/analysis/FrameProcessor.kt | 25 +
 .../livingai/analysis/SavedMaskProcessor.kt | 48 ++
 .../livingai/analysis/SegmentProcessor.kt | 265 ++++++++
 .../example/livingai/camera/CameraActivity.kt | 144 +++++
 .../livingai/camera/CameraProcessor.kt | 177 ++++++
 .../example/livingai/camera/OverlayManager.kt | 33 +
 .../com/example/livingai/commons/Constants.kt | 19 +
 .../example/livingai/storage/StorageUtils.kt | 45 ++
 .../livingai/ui/overlay/MaskOverlay.kt | 61 ++
 .../livingai/ui/overlay/SilhouetteOverlay.kt | 61 ++
 .../ui/page/AddAnimalProfileActivity.kt | 501 +++++++++++++++
 .../ui/page/FullScreenImageActivity.kt | 62 ++
 .../livingai/ui/page/GalleryActivity.kt | 387 ++++++++++++
 .../example/livingai/ui/page/HomeActivity.kt | 342 +++++++++++
 .../livingai/ui/page/RatingActivity.kt | 310 ++++++++++
 .../com/example/livingai/ui/theme/Color.kt | 11 +
 .../com/example/livingai/ui/theme/Theme.kt | 58 ++
 .../com/example/livingai/ui/theme/Type.kt | 34 +
 app/src/main/res/drawable/angle.png | Bin 0 -> 6087 bytes
 app/src/main/res/drawable/back.png | Bin 0 -> 5005 bytes
 app/src/main/res/drawable/back2.png | Bin 0 -> 3864 bytes
 .../main/res/drawable/bg_rating_segment.xml | 9 +
 .../main/res/drawable/bg_spinner_rounded.xml | 5 +
 .../main/res/drawable/cow_illustration_0.png | Bin 0 -> 345336 bytes
 .../main/res/drawable/cow_illustration_1.png | Bin 0 -> 630861 bytes
 .../main/res/drawable/cow_illustration_2.png | Bin 0 -> 172209 bytes
 .../main/res/drawable/cow_illustration_3.png | Bin 0 -> 187321 bytes
 .../main/res/drawable/cow_illustration_4.png | Bin 0 -> 161199 bytes
 .../res/drawable/divider_rating_segment.xml | 6 +
 app/src/main/res/drawable/front.png | Bin 0 -> 3772 bytes
 app/src/main/res/drawable/ic_back_arrow.xml | 10 +
 .../res/drawable/ic_launcher_background.xml | 170 +++++
 .../res/drawable/ic_launcher_foreground.xml | 30 +
 .../res/drawable/ic_round_back_button.xml | 22 +
 app/src/main/res/drawable/left.png | Bin 0 -> 27312 bytes
 app/src/main/res/drawable/leftangle.png | Bin 0 -> 48193 bytes
 app/src/main/res/drawable/right.png | Bin 0 -> 6115 bytes
 app/src/main/res/drawable/rightangle.png | Bin 0 -> 33974 bytes
 app/src/main/res/drawable/rounded_bg.xml | 5 +
 .../res/layout/activity_cow_selection.xml | 581 ++++++++++++++++++
 .../res/layout/activity_full_screen_image.xml | 42 ++
 app/src/main/res/layout/activity_gallery.xml | 103 ++++
 app/src/main/res/layout/activity_home.xml | 293 +++++++++
 app/src/main/res/layout/activity_main.xml | 86 +++
 app/src/main/res/layout/activity_rating.xml | 157 +++++
 .../main/res/layout/item_feature_rating.xml | 28 +
 .../main/res/layout/item_image_thumbnail.xml | 57 ++
 .../res/mipmap-anydpi-v26/ic_launcher.xml | 6 +
 .../mipmap-anydpi-v26/ic_launcher_round.xml | 6 +
 app/src/main/res/mipmap-hdpi/ic_launcher.webp | Bin 0 -> 1404 bytes
 .../res/mipmap-hdpi/ic_launcher_round.webp | Bin 0 -> 2898 bytes
 app/src/main/res/mipmap-mdpi/ic_launcher.webp | Bin 0 -> 982 bytes
 .../res/mipmap-mdpi/ic_launcher_round.webp | Bin 0 -> 1772 bytes
 .../main/res/mipmap-xhdpi/ic_launcher.webp | Bin 0 -> 1900 bytes
 .../res/mipmap-xhdpi/ic_launcher_round.webp | Bin 0 -> 3918 bytes
 .../main/res/mipmap-xxhdpi/ic_launcher.webp | Bin 0 -> 2884 bytes
 .../res/mipmap-xxhdpi/ic_launcher_round.webp | Bin 0 -> 5914 bytes
 .../main/res/mipmap-xxxhdpi/ic_launcher.webp | Bin 0 -> 3844 bytes
 .../res/mipmap-xxxhdpi/ic_launcher_round.webp | Bin 0 -> 7778 bytes
 app/src/main/res/values-hi/strings.xml | 127 ++++
 app/src/main/res/values/colors.xml | 28 +
 app/src/main/res/values/strings.xml | 127 ++++
 app/src/main/res/values/themes.xml | 12 +
 app/src/main/res/xml/backup_rules.xml | 13 +
 .../main/res/xml/data_extraction_rules.xml | 19 +
 .../com/example/livingai/ExampleUnitTest.kt | 17 +
 build.gradle.kts | 6 +
 gradle.properties | 23 +
 gradle/libs.versions.toml | 35 ++
 gradle/wrapper/gradle-wrapper.jar | Bin 0 -> 45457 bytes
 gradle/wrapper/gradle-wrapper.properties | 8 +
 gradlew | 251 ++++
 gradlew.bat | 94 +++
 settings.gradle.kts | 24 +
 93 files changed, 5360 insertions(+)
 create mode 100644 .gitattributes
 create mode 100644 .gitignore
 create mode 100644 .idea/.gitignore
 create mode 100644 .idea/AndroidProjectSystem.xml
 create mode 100644 .idea/compiler.xml
 create mode 100644 .idea/deploymentTargetSelector.xml
 create mode 100644 .idea/deviceManager.xml
 create mode 100644 .idea/gradle.xml
 create mode 100644 .idea/inspectionProfiles/Project_Default.xml
 create mode 100644 .idea/migrations.xml
 create mode 100644 .idea/misc.xml
 create mode 100644 .idea/runConfigurations.xml
 create mode 100644 .idea/studiobot.xml
 create mode 100644 app/.gitignore
 create mode 100644 app/build.gradle.kts
 create mode 100644 app/proguard-rules.pro
 create mode 100644 app/src/androidTest/java/com/example/livingai/ExampleInstrumentedTest.kt
 create mode 100644 app/src/main/AndroidManifest.xml
 create mode 100644 app/src/main/java/com/example/livingai/analysis/Analyzer.kt
 create mode 100644 app/src/main/java/com/example/livingai/analysis/FrameProcessor.kt
 create mode 100644 app/src/main/java/com/example/livingai/analysis/SavedMaskProcessor.kt
 create mode 100644 app/src/main/java/com/example/livingai/analysis/SegmentProcessor.kt
 create mode 100644 app/src/main/java/com/example/livingai/camera/CameraActivity.kt
 create mode 100644 app/src/main/java/com/example/livingai/camera/CameraProcessor.kt
 create mode 100644 app/src/main/java/com/example/livingai/camera/OverlayManager.kt
 create mode 100644 app/src/main/java/com/example/livingai/commons/Constants.kt
 create mode 100644 app/src/main/java/com/example/livingai/storage/StorageUtils.kt
 create mode 100644 app/src/main/java/com/example/livingai/ui/overlay/MaskOverlay.kt
 create mode 100644 app/src/main/java/com/example/livingai/ui/overlay/SilhouetteOverlay.kt
 create mode 100644 app/src/main/java/com/example/livingai/ui/page/AddAnimalProfileActivity.kt
 create mode 100644 app/src/main/java/com/example/livingai/ui/page/FullScreenImageActivity.kt
 create mode 100644 app/src/main/java/com/example/livingai/ui/page/GalleryActivity.kt
 create mode 100644 app/src/main/java/com/example/livingai/ui/page/HomeActivity.kt
 create mode 100644 app/src/main/java/com/example/livingai/ui/page/RatingActivity.kt
 create mode 100644 app/src/main/java/com/example/livingai/ui/theme/Color.kt
 create mode 100644 app/src/main/java/com/example/livingai/ui/theme/Theme.kt
 create mode 100644 app/src/main/java/com/example/livingai/ui/theme/Type.kt
 create mode 100644 app/src/main/res/drawable/angle.png
 create mode 100644 app/src/main/res/drawable/back.png
 create mode 100644 app/src/main/res/drawable/back2.png
 create mode 100644 app/src/main/res/drawable/bg_rating_segment.xml
 create mode 100644 app/src/main/res/drawable/bg_spinner_rounded.xml
 create mode 100644 app/src/main/res/drawable/cow_illustration_0.png
 create mode 100644 app/src/main/res/drawable/cow_illustration_1.png
 create mode 100644 app/src/main/res/drawable/cow_illustration_2.png
 create mode 100644 app/src/main/res/drawable/cow_illustration_3.png
 create mode 100644 app/src/main/res/drawable/cow_illustration_4.png
 create mode 100644 app/src/main/res/drawable/divider_rating_segment.xml
 create mode 100644 app/src/main/res/drawable/front.png
 create mode 100644 app/src/main/res/drawable/ic_back_arrow.xml
 create mode 100644 app/src/main/res/drawable/ic_launcher_background.xml
 create mode 100644 app/src/main/res/drawable/ic_launcher_foreground.xml
 create mode 100644 app/src/main/res/drawable/ic_round_back_button.xml
 create mode 100644 app/src/main/res/drawable/left.png
 create mode 100644 app/src/main/res/drawable/leftangle.png
 create mode 100644 app/src/main/res/drawable/right.png
 create mode 100644 app/src/main/res/drawable/rightangle.png
 create mode 100644 app/src/main/res/drawable/rounded_bg.xml
 create mode 100644 app/src/main/res/layout/activity_cow_selection.xml
 create mode 100644 app/src/main/res/layout/activity_full_screen_image.xml
 create mode 100644 app/src/main/res/layout/activity_gallery.xml
 create mode 100644 app/src/main/res/layout/activity_home.xml
 create mode 100644 app/src/main/res/layout/activity_main.xml
 create mode 100644 app/src/main/res/layout/activity_rating.xml
 create mode 100644 app/src/main/res/layout/item_feature_rating.xml
 create mode 100644 app/src/main/res/layout/item_image_thumbnail.xml
 create mode 100644 app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml
 create mode 100644 app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml
 create mode 100644 app/src/main/res/mipmap-hdpi/ic_launcher.webp
 create mode 100644 app/src/main/res/mipmap-hdpi/ic_launcher_round.webp
 create mode 100644 app/src/main/res/mipmap-mdpi/ic_launcher.webp
 create mode 100644 app/src/main/res/mipmap-mdpi/ic_launcher_round.webp
 create mode 100644 app/src/main/res/mipmap-xhdpi/ic_launcher.webp
 create mode 100644 app/src/main/res/mipmap-xhdpi/ic_launcher_round.webp
 create mode 100644 app/src/main/res/mipmap-xxhdpi/ic_launcher.webp
 create mode 100644 app/src/main/res/mipmap-xxhdpi/ic_launcher_round.webp
 create mode 100644 app/src/main/res/mipmap-xxxhdpi/ic_launcher.webp
 create mode 100644 app/src/main/res/mipmap-xxxhdpi/ic_launcher_round.webp
 create mode 100644 app/src/main/res/values-hi/strings.xml
 create mode 100644 app/src/main/res/values/colors.xml
 create mode 100644 app/src/main/res/values/strings.xml
 create mode 100644 app/src/main/res/values/themes.xml
 create mode 100644 app/src/main/res/xml/backup_rules.xml
 create mode 100644 app/src/main/res/xml/data_extraction_rules.xml
 create mode 100644 app/src/test/java/com/example/livingai/ExampleUnitTest.kt
 create mode 100644 build.gradle.kts
 create mode 100644 gradle.properties
 create mode 100644 gradle/libs.versions.toml
 create mode 100644 gradle/wrapper/gradle-wrapper.jar
 create mode 100644 gradle/wrapper/gradle-wrapper.properties
 create mode 100644 gradlew
 create mode 100644 gradlew.bat
 create mode 100644 settings.gradle.kts

diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..dfe0770
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,2 @@
+# Auto detect text files and perform LF normalization
+* text=auto
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..aa724b7
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,15 @@
+*.iml
+.gradle
+/local.properties
+/.idea/caches
+/.idea/libraries
+/.idea/modules.xml
+/.idea/workspace.xml
+/.idea/navEditor.xml
+/.idea/assetWizardSettings.xml
+.DS_Store
+/build
+/captures
+.externalNativeBuild
+.cxx
+local.properties
diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 0000000..26d3352
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,3 @@
+# Default ignored files
+/shelf/
+/workspace.xml
diff --git a/.idea/AndroidProjectSystem.xml b/.idea/AndroidProjectSystem.xml
new file mode 100644
index 0000000..4a53bee
--- /dev/null
+++ b/.idea/AndroidProjectSystem.xml
@@ -0,0 +1,6 @@
+ + + +
\ No newline at end of file
diff --git a/.idea/compiler.xml b/.idea/compiler.xml
new file mode 100644
index 0000000..b86273d
--- /dev/null
+++ b/.idea/compiler.xml
@@ -0,0 +1,6 @@
+ + + + +
\ No newline at end of file
diff --git a/.idea/deploymentTargetSelector.xml b/.idea/deploymentTargetSelector.xml
new file mode 100644
index 0000000..b268ef3
--- /dev/null
+++ b/.idea/deploymentTargetSelector.xml
@@ -0,0 +1,10 @@
+ + + + + + +
\ No newline at end of file
diff --git a/.idea/deviceManager.xml b/.idea/deviceManager.xml
new file mode 100644
index 0000000..91f9558
--- /dev/null
+++ b/.idea/deviceManager.xml
@@ -0,0 +1,13 @@
+ + + +
\ No newline at end of file
diff --git a/.idea/gradle.xml b/.idea/gradle.xml
new file mode 100644
index 0000000..639c779
--- /dev/null
+++ b/.idea/gradle.xml
@@ -0,0 +1,19 @@
+ + + + +
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml
new file mode 100644
index 0000000..7061a0d
--- /dev/null
+++ b/.idea/inspectionProfiles/Project_Default.xml
@@ -0,0 +1,61 @@
+ +
\ No newline at end of file
diff --git a/.idea/migrations.xml b/.idea/migrations.xml
new file mode 100644
index 0000000..f8051a6
--- /dev/null
+++ b/.idea/migrations.xml
@@ -0,0 +1,10 @@
+ + + +
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 0000000..b2c751a
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,9 @@
+ + + + + +
\ No newline at end of file
diff --git a/.idea/runConfigurations.xml b/.idea/runConfigurations.xml
new file mode 100644
index 0000000..16660f1
--- /dev/null
+++ b/.idea/runConfigurations.xml
@@ -0,0 +1,17 @@
+ + + +
\ No newline at end of file
diff --git a/.idea/studiobot.xml b/.idea/studiobot.xml
new file mode 100644
index 0000000..539e3b8
--- /dev/null
+++ b/.idea/studiobot.xml
@@ -0,0 +1,6 @@
+ + + +
\ No newline at end of file
diff --git a/app/.gitignore b/app/.gitignore
new file mode 100644
index 0000000..42afabf
--- /dev/null
+++ b/app/.gitignore
@@ -0,0 +1 @@
+/build
\ No newline at end of file
diff --git a/app/build.gradle.kts b/app/build.gradle.kts
new file mode 100644
index 0000000..bacf25d
--- /dev/null
+++ b/app/build.gradle.kts
@@ -0,0 +1,74 @@
+plugins {
+    alias(libs.plugins.android.application)
+    alias(libs.plugins.kotlin.android)
+    alias(libs.plugins.kotlin.compose)
+}
+
+android {
+    namespace = "com.example.livingai"
+    compileSdk = 36
+
+    defaultConfig {
+        applicationId = "com.example.livingai"
+        minSdk = 24
+        targetSdk = 36
+        versionCode = 1
+        versionName = "1.0"
+
+        testInstrumentationRunner = "androidx.test.runner.AndroidJUnitRunner"
+    }
+
+    buildTypes {
+        release {
+            isMinifyEnabled = false
+            proguardFiles(
+                getDefaultProguardFile("proguard-android-optimize.txt"),
+                "proguard-rules.pro"
+            )
+        }
+    }
+    compileOptions {
+        sourceCompatibility = JavaVersion.VERSION_11
+        targetCompatibility = JavaVersion.VERSION_11
+    }
+    kotlinOptions {
+        jvmTarget = "11"
+    }
+    buildFeatures {
+        compose = true
+        viewBinding = true
+    }
+}
+
+dependencies {
+    val cameraxVersion = "1.5.0-alpha03"
+    implementation("androidx.appcompat:appcompat:1.7.0")
+    implementation("androidx.cardview:cardview:1.0.0")
+    implementation("com.google.android.material:material:1.12.0")
+    implementation("androidx.camera:camera-core:${cameraxVersion}")
+    implementation("androidx.camera:camera-camera2:${cameraxVersion}")
+    implementation("androidx.camera:camera-lifecycle:${cameraxVersion}")
+    implementation("androidx.camera:camera-view:${cameraxVersion}")
+    implementation("androidx.camera:camera-mlkit-vision:${cameraxVersion}")
+
+    implementation("com.google.mlkit:object-detection:17.0.2")
+    implementation("com.google.android.gms:play-services-mlkit-subject-segmentation:16.0.0-beta1")
+
+    implementation(libs.androidx.core.ktx)
+    implementation(libs.androidx.lifecycle.runtime.ktx)
+    implementation(libs.androidx.activity.compose)
+    implementation(platform(libs.androidx.compose.bom))
+    implementation(libs.androidx.compose.ui)
+    implementation(libs.androidx.compose.ui.graphics)
+    implementation(libs.androidx.compose.ui.tooling.preview)
+    implementation(libs.androidx.compose.material3)
+    implementation(libs.androidx.appcompat)
+    implementation(libs.material)
+    testImplementation(libs.junit)
+    androidTestImplementation(libs.androidx.junit)
+    androidTestImplementation(libs.androidx.espresso.core)
+    androidTestImplementation(platform(libs.androidx.compose.bom))
+    androidTestImplementation(libs.androidx.compose.ui.test.junit4)
+    debugImplementation(libs.androidx.compose.ui.tooling)
+    debugImplementation(libs.androidx.compose.ui.test.manifest)
+}
\ No newline at end of file
diff --git a/app/proguard-rules.pro b/app/proguard-rules.pro
new file mode 100644
index 0000000..481bb43
--- /dev/null
+++ b/app/proguard-rules.pro
@@ -0,0 +1,21 @@
+# Add project specific ProGuard rules here.
+# You can control the set of applied configuration files using the
+# proguardFiles setting in build.gradle.
+#
+# For more details, see
+#   http://developer.android.com/guide/developing/tools/proguard.html
+
+# If your project uses WebView with JS, uncomment the following
+# and specify the fully qualified class name to the JavaScript interface
+# class:
+#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+#   public *;
+#}
+
+# Uncomment this to preserve the line number information for
+# debugging stack traces.
+#-keepattributes SourceFile,LineNumberTable
+
+# If you keep the line number information, uncomment this to
+# hide the original source file name.
+#-renamesourcefileattribute SourceFile
\ No newline at end of file
diff --git a/app/src/androidTest/java/com/example/livingai/ExampleInstrumentedTest.kt b/app/src/androidTest/java/com/example/livingai/ExampleInstrumentedTest.kt
new file mode 100644
index 0000000..23b80ee
--- /dev/null
+++ b/app/src/androidTest/java/com/example/livingai/ExampleInstrumentedTest.kt
@@ -0,0 +1,24 @@
+package com.example.livingai
+
+import androidx.test.platform.app.InstrumentationRegistry
+import androidx.test.ext.junit.runners.AndroidJUnit4
+
+import org.junit.Test
+import org.junit.runner.RunWith
+
+import org.junit.Assert.*
+
+/**
+ * Instrumented test, which will execute on an Android device.
+ *
+ * See [testing documentation](http://d.android.com/tools/testing).
+ */
+@RunWith(AndroidJUnit4::class)
+class ExampleInstrumentedTest {
+    @Test
+    fun useAppContext() {
+        // Context of the app under test.
+        val appContext = InstrumentationRegistry.getInstrumentation().targetContext
+        assertEquals("com.example.livingai", appContext.packageName)
+    }
+}
\ No newline at end of file
diff --git a/app/src/main/AndroidManifest.xml b/app/src/main/AndroidManifest.xml
new file mode 100644
index 0000000..1197772
--- /dev/null
+++ b/app/src/main/AndroidManifest.xml
@@ -0,0 +1,63 @@
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
\ No newline at end of file
diff --git a/app/src/main/java/com/example/livingai/analysis/Analyzer.kt b/app/src/main/java/com/example/livingai/analysis/Analyzer.kt
new file mode 100644
index 0000000..ccafc24
--- /dev/null
+++ b/app/src/main/java/com/example/livingai/analysis/Analyzer.kt
@@ -0,0 +1,17 @@
+package com.example.livingai.analysis
+
+import androidx.camera.core.ImageAnalysis
+import androidx.camera.core.ImageProxy
+
+class Analyzer(
+    private val listener: AnalysisListener
+) : ImageAnalysis.Analyzer {
+
+    interface AnalysisListener {
+        fun onFrame(imageProxy: ImageProxy)
+    }
+
+    override fun analyze(image: ImageProxy) {
+        listener.onFrame(image)
+    }
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/example/livingai/analysis/FrameProcessor.kt b/app/src/main/java/com/example/livingai/analysis/FrameProcessor.kt
new file mode 100644
index 0000000..af1faea
--- /dev/null
+++ b/app/src/main/java/com/example/livingai/analysis/FrameProcessor.kt
@@ -0,0 +1,25 @@
+package com.example.livingai.analysis
+
+import android.graphics.Bitmap
+import android.graphics.Color
+import android.graphics.Matrix
+import android.util.Log
+import androidx.camera.core.ExperimentalGetImage
+import androidx.camera.core.ImageProxy
+import com.example.livingai.commons.Constants
+import com.google.android.gms.tasks.Task
+import com.google.android.gms.tasks.TaskCompletionSource
+import com.google.mlkit.vision.common.InputImage
+import java.util.concurrent.Executors
+import java.util.concurrent.atomic.AtomicBoolean
+
+/**
+ * Responsible for processing frames from the camera.
+ * It prepares the frame (e.g. orientation) and passes it to SegmentProcessor.
+ */
+class FrameProcessor {
+    fun prepareInputImage(imageProxy: ImageProxy, rotationDegrees: Int): InputImage? {
+        val mediaImage = imageProxy.image ?: return null
+        return InputImage.fromMediaImage(mediaImage, rotationDegrees)
+    }
+}
diff --git a/app/src/main/java/com/example/livingai/analysis/SavedMaskProcessor.kt b/app/src/main/java/com/example/livingai/analysis/SavedMaskProcessor.kt
new file mode 100644
index 0000000..4903cba
--- /dev/null
+++ b/app/src/main/java/com/example/livingai/analysis/SavedMaskProcessor.kt
@@ -0,0 +1,48 @@
+package com.example.livingai.analysis
+
+import android.content.Context
+import android.graphics.Bitmap
+import android.graphics.BitmapFactory
+import android.graphics.Color
+import android.util.Log
+import com.example.livingai.commons.Constants
+import java.io.File
+
+class SavedMaskProcessor(private val context: Context) {
+
+    fun loadSavedMask(orientation: String): Bitmap? {
+        val filename = "${orientation}_mask.png"
+        val file = File(context.filesDir, filename)
+        var savedMaskBitmap: Bitmap? = null
+
+        if (file.exists()) {
+            try {
+                val savedBitmap = BitmapFactory.decodeFile(file.absolutePath)
+
+                if (savedBitmap != null) {
+                    //TODO: Added for debug, can be removed safely
+                    savedMaskBitmap = applyGreenColor(savedBitmap)
+                }
+
+            } catch (e: Exception) {
+                Log.e("SavedMaskProcessor", "Error loading saved mask", e)
+            }
+        }
+        return savedMaskBitmap
+    }
+
+    //TODO: Added for debug, can be removed safely
+    private fun applyGreenColor(original: Bitmap): Bitmap {
+        val width = original.width
+        val height = original.height
+        val pixels = IntArray(width * height)
+        original.getPixels(pixels, 0, width, 0, 0, width, height)
+        for (i in pixels.indices) {
+            val alpha = (pixels[i] shr 24) and 0xff
+            if (alpha > 10) {
+                pixels[i] = Color.argb(alpha, 0, 255, 0)
+            }
+        }
+        return Bitmap.createBitmap(pixels, width, height, Bitmap.Config.ARGB_8888)
+    }
+}
diff --git a/app/src/main/java/com/example/livingai/analysis/SegmentProcessor.kt b/app/src/main/java/com/example/livingai/analysis/SegmentProcessor.kt
new file mode 100644
index 0000000..978dd1b
--- /dev/null
+++ b/app/src/main/java/com/example/livingai/analysis/SegmentProcessor.kt
@@ -0,0 +1,265 @@
+package com.example.livingai.analysis
+
+import android.content.res.Resources
+import android.graphics.Bitmap
+import android.graphics.Color
+import android.graphics.Matrix
+import android.util.Log
+import com.example.livingai.commons.Constants
+import com.google.android.gms.tasks.Task
+import com.google.android.gms.tasks.TaskCompletionSource
+import com.google.mlkit.vision.common.InputImage
+import com.google.mlkit.vision.segmentation.subject.SubjectSegmentation
+import com.google.mlkit.vision.segmentation.subject.SubjectSegmenterOptions
+import java.util.concurrent.Executors
+import java.util.concurrent.atomic.AtomicBoolean
+import kotlin.math.max
+import kotlin.math.min
+import kotlin.math.sqrt
+
+data class SegmentationResult(
+    val mask: Bitmap?,
+    val isMatch: Boolean
+)
+
+class SegmentProcessor {
+
+    private val isProcessing = AtomicBoolean(false)
+    private val processingExecutor = Executors.newSingleThreadExecutor()
+
+    private val metrics = Resources.getSystem().displayMetrics
+    private val screenW = metrics.widthPixels
+    private val screenH = metrics.heightPixels
+
+    private val options = SubjectSegmenterOptions.Builder()
+        .enableMultipleSubjects(
+            SubjectSegmenterOptions.SubjectResultOptions.Builder()
+                .enableConfidenceMask()
+                .build()
+        )
+        .build()
+
+    private val segmenter = SubjectSegmentation.getClient(options)
+
+    fun process(
+        inputImage: InputImage,
+        savedMask: Bitmap?,
+        thresholdPercent: Int = 75,
+        algorithm: String = Constants.ALGORITHM_HAMMING
+    ): Task<SegmentationResult> {
+
+        val tcs = TaskCompletionSource<SegmentationResult>()
+
+        if (!isProcessing.compareAndSet(false, true)) {
+            tcs.setResult(SegmentationResult(null, false))
+            return tcs.task
+        }
+
+        segmenter.process(inputImage)
+            .addOnSuccessListener(processingExecutor) { result ->
+
+                var bitmapMask: Bitmap? = null
+                val subject = result.subjects.firstOrNull()
+                val mask = subject?.confidenceMask
+
+                if (mask != null) {
+
+                    val startX = subject.startX
+                    val startY = subject.startY
+                    val maskWidth = subject.width
+                    val maskHeight = subject.height
+
+                    val fullWidth = inputImage.width
+                    val fullHeight = inputImage.height
+
+                    if (mask.remaining() >= maskWidth * maskHeight) {
+
+                        val colors = IntArray(fullWidth * fullHeight)
+                        mask.rewind()
+
+                        // Build full-resolution mask
+                        for (y in 0 until maskHeight) {
+                            for (x in 0 until maskWidth) {
+                                if (mask.get() > 0.5f) {
+                                    val dx = startX + x
+                                    val dy = startY + y
+                                    if (dx < fullWidth && dy < fullHeight) {
+                                        colors[dy * fullWidth + dx] =
+                                            Color.argb(180, 255, 0, 255)
+                                    }
+                                }
+                            }
+                        }
+
+                        val rawBitmap = Bitmap.createBitmap(
+                            colors, fullWidth, fullHeight, Bitmap.Config.ARGB_8888
+                        )
+
+                        // Rotate to match screen orientation if needed (so segmentation mask is upright)
+                        if (inputImage.rotationDegrees != 0) {
+                            val matrix = Matrix()
+                            matrix.postRotate(inputImage.rotationDegrees.toFloat())
+                            bitmapMask = Bitmap.createBitmap(
+                                rawBitmap, 0, 0, rawBitmap.width, rawBitmap.height, matrix, true
+                            )
+                        } else {
+                            bitmapMask = rawBitmap
+                        }
+                    }
+                }
+
+                var isMatch = false
+
+                if (bitmapMask != null && savedMask != null) {
+                    // To consider all kinds of phones with different aspect ratios and screen sizes
+                    val (scaledSavedMask, scaledSegmentedMask) = scaleBothToScreenAndCrop(savedMask, bitmapMask)
+
+                    isMatch = when (algorithm) {
+                        Constants.ALGORITHM_EUCLIDEAN ->
+                            calculateEuclideanDistance(scaledSavedMask, scaledSegmentedMask, thresholdPercent)
+                        Constants.ALGORITHM_JACCARD ->
+                            calculateJaccardSimilarity(scaledSavedMask, scaledSegmentedMask, thresholdPercent)
+                        else ->
+                            calculateHammingDistance(scaledSavedMask, scaledSegmentedMask, thresholdPercent)
+                    }
+                }
+
+                tcs.setResult(SegmentationResult(bitmapMask, isMatch))
+            }
+            .addOnFailureListener {
+                Log.e("SegmentProcessor", "Segmentation failed", it)
+                tcs.setException(it)
+            }
+            .addOnCompleteListener {
+                isProcessing.set(false)
+            }
+
+        return tcs.task
+    }
+
+    private fun calculateHammingDistance(mask1: Bitmap, mask2: Bitmap, thresholdPercent: Int): Boolean {
+        if (mask1.width != mask2.width || mask1.height != mask2.height) return false
+
+        val width = mask1.width
+        val height = mask1.height
+        val p1 = IntArray(width * height)
+        val p2 = IntArray(width * height)
+
+        mask1.getPixels(p1, 0, width, 0, 0, width, height)
+        mask2.getPixels(p2, 0, width, 0, 0, width, height)
+
+        var distance = 0
+
+        for (i in p1.indices) {
+            val a = (p1[i] ushr 24) > 0
+            val b = (p2[i] ushr 24) > 0
+            if (a != b) distance++
+        }
+
+        // With thresholdPercent = 75, up to 25% of the pixels are allowed to differ.
+        val total = width * height
+        val allowed = total * (100 - thresholdPercent) / 100
+
+        return distance <= allowed
+    }
+
+    private fun calculateEuclideanDistance(mask1: Bitmap, mask2: Bitmap, thresholdPercent: Int): Boolean {
+        if (mask1.width != mask2.width || mask1.height != mask2.height) return false
+
+        val width = mask1.width
+        val height = mask1.height
+        val p1 = IntArray(width * height)
+        val p2 = IntArray(width * height)
+
+        mask1.getPixels(p1, 0, width, 0, 0, width, height)
+        mask2.getPixels(p2, 0, width, 0, 0, width, height)
+
+        var sum = 0L
+        for (i in p1.indices) {
+            val v1 = if ((p1[i] ushr 24) > 0) 255 else 0
+            val v2 = if ((p2[i] ushr 24) > 0) 255 else 0
+            val diff = v1 - v2
+            sum += diff * diff
+        }
+
+        val dist = sqrt(sum.toDouble())
+        val max = sqrt((width * height).toDouble()) * 255.0
+        val allowed = max * (100 - thresholdPercent) / 100.0
+
+        return dist <= allowed
+    }
+
+    private fun calculateJaccardSimilarity(mask1: Bitmap, mask2: Bitmap, thresholdPercent: Int): Boolean {
+        if (mask1.width != mask2.width || mask1.height != mask2.height) return false
+
+        val width = mask1.width
+        val height = mask1.height
+        val p1 = IntArray(width * height)
+        val p2 = IntArray(width * height)
+
+        mask1.getPixels(p1, 0, width, 0, 0, width, height)
+        mask2.getPixels(p2, 0, width, 0, 0, width, height)
+
+        var intersection = 0
+        var union = 0
+
+        for (i in p1.indices) {
+            val a = ((p1[i] ushr 24) and 0xFF) > 0
+            val b = ((p2[i] ushr 24) and 0xFF) > 0
+
+            if (a && b) intersection++
+            if (a || b) union++
+        }
+
+        if (union == 0) return false
+
+        val score = (intersection.toDouble() / union.toDouble()) * 100.0
+
+        return score >= thresholdPercent
+    }
+
+    fun centerCrop(bmp: Bitmap, tw: Int, th: Int): Bitmap {
+        val offsetX = (bmp.width - tw) / 2
+        val offsetY = (bmp.height - th) / 2
+        return Bitmap.createBitmap(bmp, offsetX, offsetY, tw, th)
+    }
+
+    fun scaleFitCenter(bmp: Bitmap, isFit: Boolean): Bitmap {
+        val w = bmp.width
+        val h = bmp.height
+        // Scale to fit inside the screen (fit) or to fill it (crop)
+        val scale: Float = if (isFit) {
+            min(screenW.toFloat() / w, screenH.toFloat() / h)
+        } else {
+            max(screenW.toFloat() / w, screenH.toFloat() / h)
+        }
+        val newW = (w * scale).toInt()
+        val newH = (h * scale).toInt()
+
+        return Bitmap.createScaledBitmap(bmp, newW, newH, true)
+    }
+
+    fun scaleBothToScreenAndCrop(
+        savedMask: Bitmap,
+        segmentedMask: Bitmap
+    ): Pair<Bitmap, Bitmap> {
+
+        val scaledA = scaleFitCenter(savedMask, true)
+        val scaledB = scaleFitCenter(segmentedMask, false)
+
+        val wA = scaledA.width
+        val hA = scaledA.height
+        val wB = scaledB.width
+        val hB = scaledB.height
+
+        val targetW = min(wA, wB)
+        val targetH = min(hA, hB)
+
+        val croppedA = centerCrop(scaledA, targetW, targetH)
+        val croppedB = centerCrop(scaledB, targetW, targetH)
+
+        return Pair(croppedA, croppedB)
+    }
+
+
+}
diff --git a/app/src/main/java/com/example/livingai/camera/CameraActivity.kt b/app/src/main/java/com/example/livingai/camera/CameraActivity.kt
new file mode 100644
index 0000000..6a3dce4
--- /dev/null
+++ b/app/src/main/java/com/example/livingai/camera/CameraActivity.kt
@@ -0,0 +1,144 @@
+package com.example.livingai.camera
+
+import android.Manifest
+import android.content.pm.ActivityInfo
+import android.graphics.Bitmap
+import android.graphics.Color
+import android.os.Bundle
+import android.util.Log
+import android.view.View
+import androidx.activity.result.contract.ActivityResultContracts
+import androidx.appcompat.app.AppCompatActivity
+import androidx.camera.core.ExperimentalGetImage
+import androidx.camera.core.ImageProxy
+import androidx.core.content.edit
+import androidx.core.graphics.toColorInt
+import com.example.livingai.R
+import com.example.livingai.analysis.Analyzer
+import com.example.livingai.analysis.FrameProcessor
+import com.example.livingai.analysis.SavedMaskProcessor
+import com.example.livingai.analysis.SegmentProcessor
+import com.example.livingai.commons.Constants
+import com.example.livingai.ui.page.HomeActivity
+import com.google.android.material.button.MaterialButton
+
+class CameraActivity : AppCompatActivity(), Analyzer.AnalysisListener {
+
+    private lateinit var overlayManager: OverlayManager
+    private lateinit var cameraProcessor: CameraProcessor
+    private lateinit var frameProcessor: FrameProcessor
+    private lateinit var segmentProcessor: SegmentProcessor
+    private lateinit var savedMaskProcessor: SavedMaskProcessor
+
+    private var cowName: String? = null
+    private var orientation: String? = null
+    private var savedMaskBitmap: Bitmap? = null
+    private var matchThreshold = 75
+    private var algorithm = HomeActivity.ALGORITHM_HAMMING
+    private var isAutoCapture = true
+    private var isMaskDisplayEnabled = false
+    private var isPhotoTaken = false
+
+    override fun onCreate(savedInstanceState: Bundle?) {
+        super.onCreate(savedInstanceState)
+        setContentView(R.layout.activity_main)
+
+        cowName = intent.getStringExtra(Constants.COW_NAME)
+        orientation = intent.getStringExtra(Constants.ORIENTATION)
+
+        // Load settings
+        val prefs = getSharedPreferences("AnimalRatingPrefs", MODE_PRIVATE)
+        matchThreshold = prefs.getInt("THRESHOLD", 75)
+        algorithm = prefs.getString("ALGORITHM", HomeActivity.ALGORITHM_HAMMING) ?: HomeActivity.ALGORITHM_HAMMING
+        isAutoCapture = prefs.getBoolean(Constants.PREF_AUTO_CAPTURE, true)
+        isMaskDisplayEnabled = prefs.getBoolean(Constants.PREF_MASK_DISPLAY, false)
+
+        // Set orientation
+        if (orientation == "front" || orientation == "back") {
+            requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_PORTRAIT
+        } else {
+            requestedOrientation = ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE
+        }
+
+        // Initialize Managers and Processors
+        overlayManager = OverlayManager(
+            findViewById(R.id.silhouetteOverlay),
+            findViewById(R.id.segmentationOverlay),
+            findViewById(R.id.savedMaskOverlay)
+        )
+
+        cameraProcessor = CameraProcessor(this, findViewById(R.id.cameraPreview), this)
+        frameProcessor = FrameProcessor()
+        segmentProcessor = SegmentProcessor()
+        savedMaskProcessor = SavedMaskProcessor(this)
+
+        setupUI()
+        loadMasks()
+
+        requestPermissionLauncher.launch(Manifest.permission.CAMERA)
+    }
+
+    private fun setupUI() {
+        findViewById(R.id.btnExit).setOnClickListener { finish() }
+
+        val btnShutter = findViewById(R.id.btnShutter)
+
+        btnShutter.setOnClickListener {
+            cameraProcessor.takePhoto(
+                cowName, orientation,
+                intent.getIntExtra(Constants.SILHOUETTE_ID, 0),
+                intent.getStringExtra("RETAKE_IMAGE_PATH")
+            )
+            isPhotoTaken = true
+        }
+
+        val silhouetteId = intent.getIntExtra(Constants.SILHOUETTE_ID, 0)
+        overlayManager.setSilhouette(silhouetteId)
+    }
+
+    private fun loadMasks() {
+        savedMaskBitmap = savedMaskProcessor.loadSavedMask(orientation ?: "unknown")
+        overlayManager.showSavedMask(savedMaskBitmap, isMaskDisplayEnabled)
+    }
+
+    private val requestPermissionLauncher =
+        registerForActivityResult(ActivityResultContracts.RequestPermission()) { granted ->
+            if (granted) cameraProcessor.startCamera()
+        }
+
+    @ExperimentalGetImage
+    override fun onFrame(imageProxy: ImageProxy) {
+        if (isPhotoTaken) {
+            imageProxy.close()
+            return
+        }
+
+        val rotationDegrees = imageProxy.imageInfo.rotationDegrees
+        val inputImage = frameProcessor.prepareInputImage(imageProxy, rotationDegrees)
+
+        if (inputImage != null) {
+            segmentProcessor.process(inputImage, savedMaskBitmap, matchThreshold, algorithm)
+                .addOnSuccessListener { result ->
+                    runOnUiThread {
+                        overlayManager.showSegmentationMask(result.mask, isMaskDisplayEnabled)
+                    }
+                    if (isAutoCapture && result.isMatch && !isPhotoTaken) {
+                        isPhotoTaken = true
+                        cameraProcessor.takePhoto(
+                            cowName, orientation,
+                            intent.getIntExtra(Constants.SILHOUETTE_ID, 0),
+                            intent.getStringExtra("RETAKE_IMAGE_PATH")
+                        )
+                    }
+                }
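+                // Segmentation failures are only logged here; the ImageProxy is always closed
+                // in the completion listener below so the analyzer can deliver the next frame.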
+                .addOnFailureListener { e ->
+                    Log.e("CameraActivity", "Frame processing error", e)
+                }
+                .addOnCompleteListener {
+                    imageProxy.close()
+                }
+        } else {
+            imageProxy.close()
+        }
+    }
+}
diff --git a/app/src/main/java/com/example/livingai/camera/CameraProcessor.kt b/app/src/main/java/com/example/livingai/camera/CameraProcessor.kt
new file mode 100644
index 0000000..a54c588
--- /dev/null
+++ b/app/src/main/java/com/example/livingai/camera/CameraProcessor.kt
@@ -0,0 +1,177 @@
+package com.example.livingai.camera
+
+import android.content.Context
+import android.graphics.Bitmap
+import android.graphics.BitmapFactory
+import android.graphics.Matrix
+import android.util.Log
+import android.util.Size
+import android.widget.Toast
+import androidx.camera.core.AspectRatio
+import androidx.camera.core.ImageAnalysis
+import androidx.camera.core.ImageCapture
+import androidx.camera.core.ImageCaptureException
+import androidx.camera.lifecycle.ProcessCameraProvider
+import androidx.camera.view.PreviewView
+import androidx.core.content.ContextCompat
+import androidx.lifecycle.LifecycleOwner
+import com.example.livingai.R
+import com.example.livingai.analysis.Analyzer
+import com.example.livingai.storage.StorageUtils
+import java.io.File
+import java.io.FileOutputStream
+import java.util.concurrent.Executors
+import kotlin.math.abs
+import kotlin.math.max
+import kotlin.math.min
+
+/**
+ * Responsible only for fetching camera frames and capturing images.
+ */
+class CameraProcessor(
+    private val context: Context,
+    private val previewView: PreviewView,
+    private val analysisListener: Analyzer.AnalysisListener
+) {
+
+    private var imageCapture: ImageCapture? = null
+    private val cameraExecutor = Executors.newSingleThreadExecutor()
+    var analysisSize: Size? = null
+        private set
+
+    fun startCamera(onAnalysisSizeReady: (Int, Int) -> Unit = { _, _ -> }) {
+        val providerFuture = ProcessCameraProvider.getInstance(context)
+
+        providerFuture.addListener({
+            val cameraProvider = providerFuture.get()
+
+            val preview = androidx.camera.core.Preview.Builder().build()
+            preview.surfaceProvider = previewView.surfaceProvider
+
+            imageCapture = ImageCapture.Builder()
+                .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
+                .build()
+
+            val metrics = context.resources.displayMetrics
+            val screenAspectRatio = aspectRatio(metrics.widthPixels, metrics.heightPixels)
+
+            val analyzer = ImageAnalysis.Builder()
+                .setTargetAspectRatio(screenAspectRatio)
+                .setBackpressureStrategy(
+                    ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST
+                )
+                .build()
+                .also {
+                    it.setAnalyzer(cameraExecutor, Analyzer(analysisListener))
+                }
+
+            cameraProvider.unbindAll()
+
+            if (context is LifecycleOwner) {
+                cameraProvider.bindToLifecycle(
+                    context,
+                    androidx.camera.core.CameraSelector.DEFAULT_BACK_CAMERA,
+                    preview,
+                    imageCapture,
+                    analyzer
+                )
+
+                // Get analysis resolution
+                analyzer.resolutionInfo?.resolution?.let { size ->
+                    analysisSize = size
+                    onAnalysisSizeReady(size.width, size.height)
+                } ?: run {
+                    // Fallback: if resolution info is not available immediately
+                }
+            }
+
+        }, ContextCompat.getMainExecutor(context))
+    }
+
+    private fun aspectRatio(width: Int, height: Int): Int {
+        val previewRatio = max(width, height).toDouble() / min(width, height)
+        if (abs(previewRatio - 4.0 / 3.0) <= abs(previewRatio - 16.0 / 9.0)) {
+            return AspectRatio.RATIO_4_3
+        }
+        return AspectRatio.RATIO_16_9
+    }
+
+    fun takePhoto(cowName: String?, orientation: String?, silhouetteId: Int, retakePath: String? = null) {
+        val imageCapture = imageCapture ?: return
+
+        val name = cowName ?: "unknown"
+        val side = orientation ?: "unknown"
+
+        // Find current count for this cow and orientation
+        val cowFolder = StorageUtils.getCowImageFolder(name)
+
+        val existingFiles = cowFolder.listFiles { _, fname ->
+            fname.startsWith("${name}_${side}_") && fname.endsWith(".jpg")
+        }
+        val count = (existingFiles?.size ?: 0) + 1
+
+        if (!cowFolder.exists()) {
+            cowFolder.mkdirs()
+        }
+
+        val filename = "${name}_${side}_${count}.jpg"
+        val file = File(cowFolder, filename)
+
+        val outputOptions = ImageCapture.OutputFileOptions.Builder(file).build()
+
+        imageCapture.takePicture(
+            outputOptions,
+            ContextCompat.getMainExecutor(context),
+            object : ImageCapture.OnImageSavedCallback {
+                override fun onError(exc: ImageCaptureException) {
+                    Log.e("CameraProcessor", "Photo capture failed: ${exc.message}", exc)
+                    Toast.makeText(context, context.getString(R.string.toast_capture_failed), Toast.LENGTH_SHORT).show()
+                }
+
+                override fun onImageSaved(output: ImageCapture.OutputFileResults) {
+                    try {
+                        val bitmap = BitmapFactory.decodeFile(file.absolutePath)
+                        val matrix = Matrix()
+
+                        val rotation = if (orientation == "front" || orientation == "back") 90f else 0f
+
+                        if (rotation != 0f) {
+                            matrix.postRotate(rotation)
+                            val rotatedBitmap = Bitmap.createBitmap(bitmap, 0, 0, bitmap.width, bitmap.height, matrix, true)
+                            FileOutputStream(file).use { out ->
+                                rotatedBitmap.compress(Bitmap.CompressFormat.JPEG, 100, out)
+                            }
+                        }
+
+                        if (!retakePath.isNullOrEmpty()) {
+                            val oldFile = File(retakePath)
+                            if (oldFile.exists()) {
+                                oldFile.delete()
+                            }
+                        }
+
+                        val msg = "${context.getString(R.string.toast_saved_as)} $filename"
+                        Toast.makeText(context, msg, Toast.LENGTH_SHORT).show()
+
+                        // Navigate to FullScreenImageActivity
+                        val intent = android.content.Intent(context, com.example.livingai.ui.page.FullScreenImageActivity::class.java)
+                        intent.putExtra("IMAGE_PATH", file.absolutePath)
+                        intent.putExtra("ALLOW_RETAKE", true)
+                        intent.putExtra("COW_NAME", cowName)
+                        intent.putExtra("ORIENTATION", orientation)
+                        intent.putExtra("SILHOUETTE_ID", silhouetteId)
+                        context.startActivity(intent)
+
+                        if (context is android.app.Activity) {
+                            context.finish()
+                        }
+
+                    } catch (e: Exception) {
+                        Log.e("CameraProcessor", "Error saving image", e)
+                        Toast.makeText(context, context.getString(R.string.toast_error_saving_image), Toast.LENGTH_SHORT).show()
+                    }
+                }
+            }
+        )
+    }
+}
diff --git a/app/src/main/java/com/example/livingai/camera/OverlayManager.kt b/app/src/main/java/com/example/livingai/camera/OverlayManager.kt
new file mode 100644
index 0000000..c167a2d
--- /dev/null
+++ b/app/src/main/java/com/example/livingai/camera/OverlayManager.kt
@@ -0,0 +1,33 @@
+package com.example.livingai.camera
+
+import android.graphics.Bitmap
+import android.view.View
+import android.widget.ImageView
+import com.example.livingai.ui.overlay.MaskOverlay
+import com.example.livingai.ui.overlay.SilhouetteOverlay
+
+class OverlayManager(
+    private val silhouetteOverlay: SilhouetteOverlay,
+    private val segmentationOverlay: MaskOverlay,
+    private val savedMaskOverlay: MaskOverlay
+) {
+
+    init {
+        // Configure scaling types
+        segmentationOverlay.scaleType = MaskOverlay.ScaleType.CENTER_CROP
+        savedMaskOverlay.scaleType = MaskOverlay.ScaleType.FIT_CENTER
+    }
+
+    fun setSilhouette(silhouetteId: Int) {
+        silhouetteOverlay.setSilhouette(silhouetteId)
+    }
+
+    fun showSegmentationMask(mask: Bitmap?, isEnabled: Boolean) {
+        segmentationOverlay.updateMask(if (isEnabled) mask else null)
+    }
+
+    fun showSavedMask(mask: Bitmap?, isEnabled: Boolean) {
+        savedMaskOverlay.updateMask(if (isEnabled) mask else null)
+    }
+
+}
diff --git a/app/src/main/java/com/example/livingai/commons/Constants.kt b/app/src/main/java/com/example/livingai/commons/Constants.kt
new file mode 100644
index 0000000..5d15f14
--- /dev/null
+++ b/app/src/main/java/com/example/livingai/commons/Constants.kt
@@ -0,0 +1,19 @@
+package com.example.livingai.commons
+
+import android.provider.Settings.Global.getString
+
+object Constants {
+    const val ALGORITHM_HAMMING = "Hamming Distance"
+    const val ALGORITHM_EUCLIDEAN = "Euclidean Distance"
+    const val ALGORITHM_JACCARD = "Jaccard Similarity"
+
+    const val PREF_AUTO_CAPTURE = "AUTO_CAPTURE_ENABLED"
+    const val PREF_MASK_DISPLAY = "MASK_DISPLAY_ENABLED"
+    const val PREF_DEBUG_ENABLE = "AUTO_DEBUG_ENABLED"
+    const val PREF_COW_ILLUSTRATION_INDEX = "COW_ILLUSTRATION_INDEX"
+
+    const val COW_NAME = "COW_NAME"
+    const val ORIENTATION = "ORIENTATION"
+    const val SILHOUETTE_ID = "SILHOUETTE_ID"
+    val ORIENTATION_NAMES = listOf("left", "right", "angle", "front", "back", "leftangle", "rightangle")
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/example/livingai/storage/StorageUtils.kt b/app/src/main/java/com/example/livingai/storage/StorageUtils.kt
new file mode 100644
index 0000000..c6339d0
--- /dev/null
+++ b/app/src/main/java/com/example/livingai/storage/StorageUtils.kt
@@ -0,0 +1,45 @@
+package com.example.livingai.storage
+
+import android.os.Environment
+import java.io.File
+
+object StorageUtils {
+
+    private const val ROOT_FOLDER_NAME = "com.LivingAI"
+
+    private fun getBaseFolder(): File {
+        val folder = File(Environment.getExternalStorageDirectory(), "Android/media/$ROOT_FOLDER_NAME")
+        if (!folder.exists()) {
+            folder.mkdirs()
+        }
+        return folder
+    }
+
+    fun getDocumentsFolder(): File {
+        val folder = File(getBaseFolder(), "Documents")
+        if (!folder.exists()) {
+            folder.mkdirs()
+        }
+        return folder
+    }
+
+    fun getImagesBaseFolder(): File {
+        val folder = File(getBaseFolder(), "Images")
+        if (!folder.exists()) {
+            folder.mkdirs()
+        }
+        return folder
+    }
+
+    fun getCowImageFolder(cowId: String): File {
+        return File(getImagesBaseFolder(), cowId)
+    }
+
+    fun getVideosFolder(): File {
+        val folder = File(getBaseFolder(), "Videos")
+        if (!folder.exists()) {
+            folder.mkdirs()
+        }
+        return folder
+    }
+}
diff --git a/app/src/main/java/com/example/livingai/ui/overlay/MaskOverlay.kt b/app/src/main/java/com/example/livingai/ui/overlay/MaskOverlay.kt
new file mode 100644
index 0000000..654f6f8
--- /dev/null
+++ b/app/src/main/java/com/example/livingai/ui/overlay/MaskOverlay.kt
@@ -0,0 +1,61 @@
+package com.example.livingai.ui.overlay
+
+import android.content.Context
+import android.graphics.Bitmap
+import android.graphics.Canvas
+import android.graphics.Matrix
+import android.util.AttributeSet
+import android.view.View
+import kotlin.math.max
+import kotlin.math.min
+
+class MaskOverlay(context: Context, attrs: AttributeSet?) : View(context, attrs) {
+
+    enum class ScaleType {
+        CENTER_CROP,
+        FIT_CENTER
+    }
+
+    private var maskBitmap: Bitmap? = null
+    private val matrix = Matrix()
+    var scaleType: ScaleType = ScaleType.CENTER_CROP
+
+    fun updateMask(bitmap: Bitmap?) {
+        maskBitmap = bitmap
+        invalidate()
+    }
+
+    override fun onDraw(canvas: Canvas) {
+        super.onDraw(canvas)
+
+        val bmp = maskBitmap ?: return
+        if (bmp.width == 0 || bmp.height == 0) return
+
+        val viewWidth = width.toFloat()
+        val viewHeight = height.toFloat()
+        val bmpWidth = bmp.width.toFloat()
+        val bmpHeight = bmp.height.toFloat()
+
+        matrix.reset()
+
+        val scaleX = viewWidth / bmpWidth
+        val scaleY = viewHeight / bmpHeight
+
+        val scale = if (scaleType == ScaleType.FIT_CENTER) {
+            min(scaleX, scaleY)
+        } else {
+            max(scaleX, scaleY)
+        }
+
+        val scaledWidth = bmpWidth * scale
+        val scaledHeight = bmpHeight * scale
+
+        val dx = (viewWidth - scaledWidth) / 2f
+        val dy = (viewHeight - scaledHeight) / 2f
+
+        matrix.postScale(scale, scale)
+        matrix.postTranslate(dx, dy)
+
+        canvas.drawBitmap(bmp, matrix, null)
+    }
+}
diff --git a/app/src/main/java/com/example/livingai/ui/overlay/SilhouetteOverlay.kt b/app/src/main/java/com/example/livingai/ui/overlay/SilhouetteOverlay.kt
new file mode 100644
index 0000000..119502f
--- /dev/null
+++ b/app/src/main/java/com/example/livingai/ui/overlay/SilhouetteOverlay.kt
@@ -0,0 +1,61 @@
+package com.example.livingai.ui.overlay
+
+import android.content.Context
+import android.graphics.*
+import android.util.AttributeSet
+import android.util.Log
+import android.view.View
+
+class SilhouetteOverlay(context: Context, attrs: AttributeSet?) : View(context, attrs) {
+
+    private val paint = Paint().apply {
+        color = Color.GREEN
+        style = Paint.Style.STROKE
+        strokeWidth = 5f
+    }
+
+    private val silhouettePaint = Paint().apply {
+        alpha = 128 // 50% opacity
+    }
+
+    private var silhouette: Bitmap? = null
+
+    fun setSilhouette(drawableId: Int) {
+        try {
+            if (drawableId != 0) {
+                silhouette = BitmapFactory.decodeResource(resources, drawableId)
+            } else {
+                silhouette = null
+            }
+            invalidate()
+        } catch (e: Exception) {
+            Log.e("SilhouetteOverlay", "Error loading silhouette", e)
+        }
+    }
+
+    override fun onDraw(canvas: Canvas) {
+        super.onDraw(canvas)
+
+        silhouette?.let { bmp ->
+
+            val viewW = width.toFloat()
+            val viewH = height.toFloat()
+            val bmpW = bmp.width.toFloat()
+            val bmpH = bmp.height.toFloat()
+
+            // Calculate scale to fit (FIT_CENTER)
+            val scale = kotlin.math.min(viewW / bmpW, viewH / bmpH)
+
+            val scaledW = bmpW * scale
+            val scaledH = bmpH * scale
+
+            val left = (viewW - scaledW) / 2f
+            val top = (viewH - scaledH) / 2f
+
+            val destRect = RectF(left, top, left + scaledW, top + scaledH)
+            val srcRect = Rect(0, 0, bmp.width, bmp.height)
+
+            canvas.drawBitmap(bmp, srcRect, destRect, silhouettePaint)
+        }
+    }
+}
\ No newline at end of file
diff --git a/app/src/main/java/com/example/livingai/ui/page/AddAnimalProfileActivity.kt b/app/src/main/java/com/example/livingai/ui/page/AddAnimalProfileActivity.kt
new file mode 100644
index 0000000..7d92853
--- /dev/null
+++ b/app/src/main/java/com/example/livingai/ui/page/AddAnimalProfileActivity.kt
@@ -0,0 +1,501 @@
+package com.example.livingai.ui.page
+
+import android.content.Intent
+import android.content.pm.PackageManager
+import android.graphics.Bitmap
+import android.graphics.BitmapFactory
+import android.graphics.Color
+import android.os.Bundle
+import android.view.View
+import android.widget.ArrayAdapter
+import android.widget.AutoCompleteTextView
+import android.widget.Button
+import android.widget.ImageView
+import android.widget.LinearLayout
+import android.widget.RadioButton
+import android.widget.RadioGroup
+import android.widget.TextView
+import android.widget.Toast
+import androidx.appcompat.app.AppCompatActivity
+import androidx.core.app.ActivityCompat
+import androidx.core.content.ContextCompat
+import com.example.livingai.R
+import com.example.livingai.camera.CameraActivity
+import com.example.livingai.commons.Constants
+import com.example.livingai.storage.StorageUtils
+import com.google.android.material.textfield.TextInputEditText
+import com.google.android.material.textfield.TextInputLayout
+import java.io.File
+import java.io.FileOutputStream
+import java.io.FileWriter
+import java.text.SimpleDateFormat
+import java.util.Date
+import java.util.Locale
+
+class AddAnimalProfileActivity : AppCompatActivity() {
+
+    private var currentCowName: String? = null
+    private lateinit var imagesContainer: LinearLayout
+    private val storagePermissionCode = 101
+    private val orientationViews = mutableMapOf()
+    private val initialImagePaths = mutableSetOf()
+    private val orientationList = Constants.ORIENTATION_NAMES
+
+    override fun onCreate(savedInstanceState: Bundle?) {
+        super.onCreate(savedInstanceState)
+        setContentView(R.layout.activity_cow_selection)
+
+        val toolbar = findViewById(R.id.toolbar)
+        setSupportActionBar(toolbar)
+        supportActionBar?.setDisplayShowTitleEnabled(false)
+
+        toolbar.setNavigationOnClickListener {
+            finish()
+        }
+
+        updateUILabels()
+
+        initializeDefaultMasks()
+        setupDropdowns()
+
+        imagesContainer = findViewById(R.id.currentCowImagesContainer)
+
+        currentCowName = savedInstanceState?.getString("COW_NAME") ?: intent.getStringExtra("COW_NAME")
+        if (currentCowName == null) {
+            generateNewCowName()
+        }
+
+        loadInitialImages()
+
+        if (intent.hasExtra("COW_NAME")) {
+            loadCowDetails(currentCowName!!)
+        }
+
+        for (orientation in orientationList) {
+            orientationViews[orientation] = findViewById(resources.getIdentifier(orientation, "id", packageName))
+        }
+
+        findViewById
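
For reference, the snippet below is a minimal sketch (not part of the commit) that mirrors the intersection-over-union rule used by SegmentProcessor.calculateJaccardSimilarity, reduced to plain BooleanArray masks so it can run off-device; the function name and sample values are illustrative assumptions only.

// Illustrative sketch: same Jaccard rule as SegmentProcessor, on BooleanArray masks.
fun jaccardMatches(a: BooleanArray, b: BooleanArray, thresholdPercent: Int): Boolean {
    require(a.size == b.size) { "Masks must have the same size" }
    var intersection = 0
    var union = 0
    for (i in a.indices) {
        if (a[i] && b[i]) intersection++
        if (a[i] || b[i]) union++
    }
    if (union == 0) return false
    val score = intersection.toDouble() / union * 100.0
    return score >= thresholdPercent
}

fun main() {
    // 3 pixels overlap out of a union of 5 -> score = 60%, so a 75% threshold rejects the match.
    val saved = booleanArrayOf(true, true, true, true, false, false)
    val live = booleanArrayOf(true, true, true, false, true, false)
    println(jaccardMatches(saved, live, 75)) // false
    println(jaccardMatches(saved, live, 50)) // true
}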