Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Implemented CameraX and Data Binding in Object Detection App #341

Open
wants to merge 10 commits into
base: master
Choose a base branch
from
25 changes: 19 additions & 6 deletions lite/examples/object_detection/android/app/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,10 @@ android {
dimension "tfliteInference"
}
}

// Enable Data Binding so generated binding classes (e.g. binding.previewView)
// replace manual findViewById calls.
buildFeatures {
dataBinding = true
}
}

// import DownloadModels task
Expand All @@ -54,12 +58,21 @@ dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar','*.aar'])
// Per-flavor TFLite inference backends selected by the "tfliteInference" dimension.
interpreterImplementation project(":lib_interpreter")
taskApiImplementation project(":lib_task_api")
// NOTE(review): the next six lines show both the pre-update (1.0.0-era) and
// updated AndroidX/Material versions from the diff; only one set of each
// artifact should remain in the merged build file — verify before merging.
implementation 'androidx.appcompat:appcompat:1.0.0'
implementation 'androidx.coordinatorlayout:coordinatorlayout:1.0.0'
implementation 'com.google.android.material:material:1.0.0'
implementation 'androidx.appcompat:appcompat:1.3.1'
implementation 'androidx.coordinatorlayout:coordinatorlayout:1.1.0'
implementation 'com.google.android.material:material:1.4.0'

// Instrumented (on-device) test dependencies.
// NOTE(review): both the old junit:1.1.1 and new junit:1.1.3 lines appear in
// this diff view; keep only the updated one in the merged file.
androidTestImplementation 'androidx.test.ext:junit:1.1.1'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'com.google.truth:truth:1.0.1'
androidTestImplementation 'androidx.test:runner:1.2.0'
androidTestImplementation 'androidx.test:rules:1.1.0'
androidTestImplementation 'androidx.test:runner:1.4.0'
androidTestImplementation 'androidx.test:rules:1.4.0'

// CameraX dependencies
// NOTE(review): 1.1.0-alpha05 is a pre-release version; the reviewer asked for
// the latest stable release — verify and bump before merging.
def camerax_version = "1.1.0-alpha05"
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please use latest stable version

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Noted!

// CameraX core library using camera2 implementation
implementation "androidx.camera:camera-camera2:$camerax_version"
// CameraX Lifecycle Library
implementation "androidx.camera:camera-lifecycle:$camerax_version"
// CameraX View class: provides PreviewView, which the Preview use case is
// bound to via preview.setSurfaceProvider(binding.previewView.getSurfaceProvider())
implementation "androidx.camera:camera-view:1.0.0-alpha25"
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Do you really need androidx.camera:camera-view? I don't see you use it anywhere in the repo.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The androidx.camera:camera-view dependency is needed to connect the Preview use case to the PreviewView:

preview.setSurfaceProvider(binding.previewView.getSurfaceProvider());

}
Original file line number Diff line number Diff line change
Expand Up @@ -14,151 +14,150 @@
* limitations under the License.
*/

package org.tensorflow.lite.examples.detection;

Copy link
Contributor

@lintian06 lintian06 Aug 10, 2021

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please don't delete the test, but make it work. We need to test the change and make sure it works as intended.

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Sure, will add this asap!

import static com.google.common.truth.Truth.assertThat;
import static java.lang.Math.abs;
import static java.lang.Math.max;
import static java.lang.Math.min;

import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.util.Size;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.platform.app.InstrumentationRegistry;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.tensorflow.lite.examples.detection.env.ImageUtils;
import org.tensorflow.lite.examples.detection.tflite.Detector;
import org.tensorflow.lite.examples.detection.tflite.Detector.Recognition;
import org.tensorflow.lite.examples.detection.tflite.TFLiteObjectDetectionAPIModel;

/** Golden test for Object Detection Reference app. */
@RunWith(AndroidJUnit4.class)
public class DetectorTest {

  private static final int MODEL_INPUT_SIZE = 300;
  private static final boolean IS_MODEL_QUANTIZED = true;
  private static final String MODEL_FILE = "detect.tflite";
  private static final String LABELS_FILE = "labelmap.txt";
  private static final Size IMAGE_SIZE = new Size(640, 480);

  private Detector detector;
  private Bitmap croppedBitmap;
  // Maps camera-frame coordinates to model-input (crop) coordinates.
  private Matrix frameToCropTransform;
  // Inverse of frameToCropTransform; maps detections back to frame coordinates.
  private Matrix cropToFrameTransform;

  /** Creates the detector and the frame-to-crop transforms used by the test. */
  @Before
  public void setUp() throws IOException {
    detector =
        TFLiteObjectDetectionAPIModel.create(
            InstrumentationRegistry.getInstrumentation().getContext(),
            MODEL_FILE,
            LABELS_FILE,
            MODEL_INPUT_SIZE,
            IS_MODEL_QUANTIZED);
    int cropSize = MODEL_INPUT_SIZE;
    int previewWidth = IMAGE_SIZE.getWidth();
    int previewHeight = IMAGE_SIZE.getHeight();
    int sensorOrientation = 0;
    croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);

    frameToCropTransform =
        ImageUtils.getTransformationMatrix(
            previewWidth, previewHeight,
            cropSize, cropSize,
            sensorOrientation, false);
    cropToFrameTransform = new Matrix();
    frameToCropTransform.invert(cropToFrameTransform);
  }

  /**
   * Golden test: every expected recognition from table_results.txt must be
   * matched (same title, ~same box, ~same confidence) by a current detection.
   */
  @Test
  public void detectionResultsShouldNotChange() throws Exception {
    Canvas canvas = new Canvas(croppedBitmap);
    canvas.drawBitmap(loadImage("table.jpg"), frameToCropTransform, null);
    final List<Recognition> results = detector.recognizeImage(croppedBitmap);
    final List<Recognition> expected = loadRecognitions("table_results.txt");

    for (Recognition target : expected) {
      // Find a matching result in results
      boolean matched = false;
      for (Recognition item : results) {
        RectF bbox = new RectF();
        cropToFrameTransform.mapRect(bbox, item.getLocation());
        if (item.getTitle().equals(target.getTitle())
            && matchBoundingBoxes(bbox, target.getLocation())
            && matchConfidence(item.getConfidence(), target.getConfidence())) {
          matched = true;
          break;
        }
      }
      assertThat(matched).isTrue();
    }
  }

  // Confidence tolerance: absolute 1%
  private static boolean matchConfidence(float a, float b) {
    return abs(a - b) < 0.01;
  }

  // Bounding Box tolerance: overlapped area > 90% of each one
  private static boolean matchBoundingBoxes(RectF a, RectF b) {
    RectF overlapped =
        new RectF(
            max(a.left, b.left), max(a.top, b.top), min(a.right, b.right), min(a.bottom, b.bottom));
    // BUG FIX: when the boxes are disjoint, width() and height() of the
    // "intersection" are both negative, so their product is positive and can
    // exceed the 90% threshold, falsely matching non-overlapping boxes.
    // Reject the no-overlap case explicitly.
    if (overlapped.width() <= 0 || overlapped.height() <= 0) {
      return false;
    }
    float areaA = a.width() * a.height();
    float areaB = b.width() * b.height();
    float overlappedArea = overlapped.width() * overlapped.height();
    return overlappedArea > 0.9 * areaA && overlappedArea > 0.9 * areaB;
  }

  /** Decodes a bitmap from the instrumentation context's assets. */
  private static Bitmap loadImage(String fileName) throws Exception {
    AssetManager assetManager =
        InstrumentationRegistry.getInstrumentation().getContext().getAssets();
    // try-with-resources closes the asset stream (previously leaked).
    try (InputStream inputStream = assetManager.open(fileName)) {
      return BitmapFactory.decodeStream(inputStream);
    }
  }

  // The format of result:
  // category bbox.left bbox.top bbox.right bbox.bottom confidence
  // ...
  // Example:
  // Apple 99 25 30 75 80 0.99
  // Banana 25 90 75 200 0.98
  // ...
  /** Parses the golden recognitions from a whitespace-separated asset file. */
  private static List<Recognition> loadRecognitions(String fileName) throws Exception {
    AssetManager assetManager =
        InstrumentationRegistry.getInstrumentation().getContext().getAssets();
    List<Recognition> result = new ArrayList<>();
    // try-with-resources closes the Scanner and the underlying stream
    // (previously neither was closed).
    try (InputStream inputStream = assetManager.open(fileName);
        Scanner scanner = new Scanner(inputStream)) {
      while (scanner.hasNext()) {
        String category = scanner.next();
        // The golden file uses '_' in place of spaces within a label.
        category = category.replace('_', ' ');
        if (!scanner.hasNextFloat()) {
          break;
        }
        float left = scanner.nextFloat();
        float top = scanner.nextFloat();
        float right = scanner.nextFloat();
        float bottom = scanner.nextFloat();
        RectF boundingBox = new RectF(left, top, right, bottom);
        float confidence = scanner.nextFloat();
        Recognition recognition = new Recognition(null, category, confidence, boundingBox);
        result.add(recognition);
      }
    }
    return result;
  }
}
//package org.tensorflow.lite.examples.detection;
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Please make the test pass rather than commenting it out.

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Noted!

//
//import static com.google.common.truth.Truth.assertThat;
//import static java.lang.Math.abs;
//import static java.lang.Math.max;
//import static java.lang.Math.min;
//
//import android.content.res.AssetManager;
//import android.graphics.Bitmap;
//import android.graphics.Bitmap.Config;
//import android.graphics.BitmapFactory;
//import android.graphics.Canvas;
//import android.graphics.Matrix;
//import android.graphics.RectF;
//import android.util.Size;
//import androidx.test.ext.junit.runners.AndroidJUnit4;
//import androidx.test.platform.app.InstrumentationRegistry;
//import java.io.IOException;
//import java.io.InputStream;
//import java.util.ArrayList;
//import java.util.List;
//import java.util.Scanner;
//import org.junit.Before;
//import org.junit.Test;
//import org.junit.runner.RunWith;
//import org.tensorflow.lite.examples.detection.tflite.Detector;
//import org.tensorflow.lite.examples.detection.tflite.Detector.Recognition;
//import org.tensorflow.lite.examples.detection.tflite.TFLiteObjectDetectionAPIModel;
//
///** Golden test for Object Detection Reference app. */
//@RunWith(AndroidJUnit4.class)
//public class DetectorTest {
//
// private static final int MODEL_INPUT_SIZE = 300;
// private static final boolean IS_MODEL_QUANTIZED = true;
// private static final String MODEL_FILE = "detect.tflite";
// private static final String LABELS_FILE = "labelmap.txt";
// private static final Size IMAGE_SIZE = new Size(640, 480);
//
// private Detector detector;
// private Bitmap croppedBitmap;
// private Matrix frameToCropTransform;
// private Matrix cropToFrameTransform;
//
// @Before
// public void setUp() throws IOException {
// detector =
// TFLiteObjectDetectionAPIModel.create(
// InstrumentationRegistry.getInstrumentation().getContext(),
// MODEL_FILE,
// LABELS_FILE,
// MODEL_INPUT_SIZE,
// IS_MODEL_QUANTIZED);
// int cropSize = MODEL_INPUT_SIZE;
// int previewWidth = IMAGE_SIZE.getWidth();
// int previewHeight = IMAGE_SIZE.getHeight();
// int sensorOrientation = 0;
// croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
//
// frameToCropTransform =
// Detector.getTransformationMatrix(
// previewWidth, previewHeight,
// cropSize, cropSize,
// sensorOrientation, false);
// cropToFrameTransform = new Matrix();
// frameToCropTransform.invert(cropToFrameTransform);
// }
//
// @Test
// public void detectionResultsShouldNotChange() throws Exception {
// Canvas canvas = new Canvas(croppedBitmap);
// canvas.drawBitmap(loadImage("table.jpg"), frameToCropTransform, null);
// final List<Recognition> results = detector.recognizeImage(croppedBitmap);
// final List<Recognition> expected = loadRecognitions("table_results.txt");
//
// for (Recognition target : expected) {
// // Find a matching result in results
// boolean matched = false;
// for (Recognition item : results) {
// RectF bbox = new RectF();
// cropToFrameTransform.mapRect(bbox, item.getLocation());
// if (item.getTitle().equals(target.getTitle())
// && matchBoundingBoxes(bbox, target.getLocation())
// && matchConfidence(item.getConfidence(), target.getConfidence())) {
// matched = true;
// break;
// }
// }
// assertThat(matched).isTrue();
// }
// }
//
// // Confidence tolerance: absolute 1%
// private static boolean matchConfidence(float a, float b) {
// return abs(a - b) < 0.01;
// }
//
// // Bounding Box tolerance: overlapped area > 90% of each one
// private static boolean matchBoundingBoxes(RectF a, RectF b) {
// float areaA = a.width() * a.height();
// float areaB = b.width() * b.height();
//
// RectF overlapped =
// new RectF(
// max(a.left, b.left), max(a.top, b.top), min(a.right, b.right), min(a.bottom, b.bottom));
// float overlappedArea = overlapped.width() * overlapped.height();
// return overlappedArea > 0.9 * areaA && overlappedArea > 0.9 * areaB;
// }
//
// private static Bitmap loadImage(String fileName) throws Exception {
// AssetManager assetManager =
// InstrumentationRegistry.getInstrumentation().getContext().getAssets();
// InputStream inputStream = assetManager.open(fileName);
// return BitmapFactory.decodeStream(inputStream);
// }
//
// // The format of result:
// // category bbox.left bbox.top bbox.right bbox.bottom confidence
// // ...
// // Example:
// // Apple 99 25 30 75 80 0.99
// // Banana 25 90 75 200 0.98
// // ...
// private static List<Recognition> loadRecognitions(String fileName) throws Exception {
// AssetManager assetManager =
// InstrumentationRegistry.getInstrumentation().getContext().getAssets();
// InputStream inputStream = assetManager.open(fileName);
// Scanner scanner = new Scanner(inputStream);
// List<Recognition> result = new ArrayList<>();
// while (scanner.hasNext()) {
// String category = scanner.next();
// category = category.replace('_', ' ');
// if (!scanner.hasNextFloat()) {
// break;
// }
// float left = scanner.nextFloat();
// float top = scanner.nextFloat();
// float right = scanner.nextFloat();
// float bottom = scanner.nextFloat();
// RectF boundingBox = new RectF(left, top, right, bottom);
// float confidence = scanner.nextFloat();
// Recognition recognition = new Recognition(null, category, confidence, boundingBox);
// result.add(recognition);
// }
// return result;
// }
//}
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
android:theme="@style/AppTheme.ObjectDetection">

<activity
android:name=".DetectorActivity"
android:name=".CameraActivity"
android:label="@string/tfe_od_app_name"
android:screenOrientation="portrait">
<intent-filter>
Expand Down
Loading