SamMorgan committed on
Commit
7f7b618
1 Parent(s): 5a3f3ba

Upload android files

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. android/.gitignore +13 -0
  2. android/app/.gitignore +2 -0
  3. android/app/build.gradle +61 -0
  4. android/app/download_model.gradle +26 -0
  5. android/app/proguard-rules.pro +21 -0
  6. android/app/src/androidTest/assets/table.jpg +0 -0
  7. android/app/src/androidTest/assets/table_results.txt +4 -0
  8. android/app/src/androidTest/java/AndroidManifest.xml +5 -0
  9. android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/DetectorTest.java +165 -0
  10. android/app/src/main/AndroidManifest.xml +35 -0
  11. android/app/src/main/assets/coco.txt +80 -0
  12. android/app/src/main/assets/kite.jpg +0 -0
  13. android/app/src/main/assets/labelmap.txt +91 -0
  14. android/app/src/main/assets/yolov4-416-fp32.tflite +3 -0
  15. android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraActivity.java +550 -0
  16. android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraConnectionFragment.java +569 -0
  17. android/app/src/main/java/org/tensorflow/lite/examples/detection/DetectorActivity.java +266 -0
  18. android/app/src/main/java/org/tensorflow/lite/examples/detection/LegacyCameraConnectionFragment.java +199 -0
  19. android/app/src/main/java/org/tensorflow/lite/examples/detection/MainActivity.java +162 -0
  20. android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/AutoFitTextureView.java +72 -0
  21. android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/OverlayView.java +48 -0
  22. android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/RecognitionScoreView.java +67 -0
  23. android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/ResultsView.java +23 -0
  24. android/app/src/main/java/org/tensorflow/lite/examples/detection/env/BorderedText.java +128 -0
  25. android/app/src/main/java/org/tensorflow/lite/examples/detection/env/ImageUtils.java +219 -0
  26. android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Logger.java +186 -0
  27. android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Size.java +142 -0
  28. android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Utils.java +188 -0
  29. android/app/src/main/java/org/tensorflow/lite/examples/detection/tflite/Classifier.java +134 -0
  30. android/app/src/main/java/org/tensorflow/lite/examples/detection/tflite/YoloV4Classifier.java +599 -0
  31. android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/MultiBoxTracker.java +211 -0
  32. android/app/src/main/res/drawable-hdpi/ic_launcher.png +0 -0
  33. android/app/src/main/res/drawable-mdpi/ic_launcher.png +0 -0
  34. android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml +34 -0
  35. android/app/src/main/res/drawable-v24/kite.jpg +0 -0
  36. android/app/src/main/res/drawable-xxhdpi/ic_launcher.png +0 -0
  37. android/app/src/main/res/drawable-xxhdpi/icn_chevron_down.png +0 -0
  38. android/app/src/main/res/drawable-xxhdpi/icn_chevron_up.png +0 -0
  39. android/app/src/main/res/drawable-xxhdpi/tfl2_logo.png +0 -0
  40. android/app/src/main/res/drawable-xxhdpi/tfl2_logo_dark.png +0 -0
  41. android/app/src/main/res/drawable-xxxhdpi/caret.jpg +0 -0
  42. android/app/src/main/res/drawable-xxxhdpi/chair.jpg +0 -0
  43. android/app/src/main/res/drawable-xxxhdpi/sample_image.jpg +0 -0
  44. android/app/src/main/res/drawable/bottom_sheet_bg.xml +9 -0
  45. android/app/src/main/res/drawable/ic_baseline_add.xml +9 -0
  46. android/app/src/main/res/drawable/ic_baseline_remove.xml +9 -0
  47. android/app/src/main/res/drawable/ic_launcher_background.xml +170 -0
  48. android/app/src/main/res/drawable/rectangle.xml +13 -0
  49. android/app/src/main/res/layout/activity_main.xml +52 -0
  50. android/app/src/main/res/layout/tfe_od_activity_camera.xml +56 -0
android/.gitignore ADDED
@@ -0,0 +1,13 @@
+ *.iml
+ .gradle
+ /local.properties
+ /.idea/libraries
+ /.idea/modules.xml
+ /.idea/workspace.xml
+ .DS_Store
+ /build
+ /captures
+ .externalNativeBuild
+
+ /.gradle/
+ /.idea/
android/app/.gitignore ADDED
@@ -0,0 +1,2 @@
+ /build
+ /build/
android/app/build.gradle ADDED
@@ -0,0 +1,61 @@
+ apply plugin: 'com.android.application'
+ apply plugin: 'de.undercouch.download'
+
+ android {
+     compileSdkVersion 28
+     buildToolsVersion '28.0.3'
+     defaultConfig {
+         applicationId "org.tensorflow.lite.examples.detection"
+         minSdkVersion 21
+         targetSdkVersion 28
+         versionCode 1
+         versionName "1.0"
+
+         // ndk {
+         //     abiFilters 'armeabi-v7a', 'arm64-v8a'
+         // }
+     }
+     buildTypes {
+         release {
+             minifyEnabled false
+             proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
+         }
+     }
+     aaptOptions {
+         noCompress "tflite"
+     }
+     compileOptions {
+         sourceCompatibility = '1.8'
+         targetCompatibility = '1.8'
+     }
+     lintOptions {
+         abortOnError false
+     }
+ }
+
+ // import DownloadModels task
+ project.ext.ASSET_DIR = projectDir.toString() + '/src/main/assets'
+ project.ext.TMP_DIR = project.buildDir.toString() + '/downloads'
+
+ // Download default models; if you wish to use your own models then
+ // place them in the "assets" directory and comment out this line.
+ //apply from: "download_model.gradle"
+
+ apply from: 'download_model.gradle'
+
+ dependencies {
+     implementation fileTree(dir: 'libs', include: ['*.jar', '*.aar'])
+     implementation 'androidx.appcompat:appcompat:1.1.0'
+     implementation 'androidx.coordinatorlayout:coordinatorlayout:1.1.0'
+     implementation 'com.google.android.material:material:1.1.0'
+     // implementation 'org.tensorflow:tensorflow-lite:0.0.0-nightly'
+     // implementation 'org.tensorflow:tensorflow-lite-gpu:0.0.0-nightly'
+     implementation 'org.tensorflow:tensorflow-lite:2.2.0'
+     implementation 'org.tensorflow:tensorflow-lite-gpu:2.2.0'
+     // implementation 'org.tensorflow:tensorflow-lite:0.0.0-gpu-experimental'
+     implementation 'androidx.constraintlayout:constraintlayout:1.1.3'
+     implementation 'com.google.code.gson:gson:2.8.6'
+     androidTestImplementation 'androidx.test.ext:junit:1.1.1'
+     androidTestImplementation 'com.android.support.test:rules:1.0.2'
+     androidTestImplementation 'com.google.truth:truth:1.0.1'
+ }
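A note on the build file above: aaptOptions { noCompress "tflite" } keeps the model asset stored uncompressed inside the APK, which is what makes it possible to memory-map the model at a fixed offset instead of inflating it first. A minimal sketch of the usual loading pattern (the ModelLoader class name is illustrative, not a file in this commit):

import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;

final class ModelLoader {
  // Memory-maps a .tflite asset straight out of the APK. This only works
  // because the asset is stored uncompressed (noCompress "tflite"), so it
  // has a stable start offset and declared length inside the archive.
  static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename)
      throws IOException {
    AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename);
    try (FileInputStream inputStream =
        new FileInputStream(fileDescriptor.getFileDescriptor())) {
      FileChannel fileChannel = inputStream.getChannel();
      return fileChannel.map(
          FileChannel.MapMode.READ_ONLY,
          fileDescriptor.getStartOffset(),
          fileDescriptor.getDeclaredLength());
    }
  }
}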
android/app/download_model.gradle ADDED
@@ -0,0 +1,26 @@
+
+ task downloadZipFile(type: Download) {
+     src 'http://storage.googleapis.com/download.tensorflow.org/models/tflite/coco_ssd_mobilenet_v1_1.0_quant_2018_06_29.zip'
+     dest new File(buildDir, 'zips/')
+     overwrite false
+ }
+
+
+ task downloadAndUnzipFile(dependsOn: downloadZipFile, type: Copy) {
+     from zipTree(downloadZipFile.dest)
+     into project.ext.ASSET_DIR
+ }
+
+
+ task extractModels(type: Copy) {
+     dependsOn downloadAndUnzipFile
+ }
+
+ tasks.whenTaskAdded { task ->
+     if (task.name == 'assembleDebug') {
+         task.dependsOn 'extractModels'
+     }
+     if (task.name == 'assembleRelease') {
+         task.dependsOn 'extractModels'
+     }
+ }
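The task wiring above makes the model download part of every build: downloadZipFile fetches the quantized COCO SSD MobileNet archive into build/zips, downloadAndUnzipFile unpacks it into the assets directory, and the tasks.whenTaskAdded hook chains extractModels in front of both assembleDebug and assembleRelease. This archive appears to supply the detect.tflite that the instrumented DetectorTest expects, separate from the bundled YOLOv4 model.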
android/app/proguard-rules.pro ADDED
@@ -0,0 +1,21 @@
+ # Add project specific ProGuard rules here.
+ # You can control the set of applied configuration files using the
+ # proguardFiles setting in build.gradle.
+ #
+ # For more details, see
+ #     http://developer.android.com/guide/developing/tools/proguard.html
+
+ # If your project uses WebView with JS, uncomment the following
+ # and specify the fully qualified class name to the JavaScript interface
+ # class:
+ #-keepclassmembers class fqcn.of.javascript.interface.for.webview {
+ #     public *;
+ #}
+
+ # Uncomment this to preserve the line number information for
+ # debugging stack traces.
+ #-keepattributes SourceFile,LineNumberTable
+
+ # If you keep the line number information, uncomment this to
+ # hide the original source file name.
+ #-renamesourcefileattribute SourceFile
android/app/src/androidTest/assets/table.jpg ADDED
android/app/src/androidTest/assets/table_results.txt ADDED
@@ -0,0 +1,4 @@
+ dining_table 27.492085 97.94615 623.1435 444.8627 0.48828125
+ knife 342.53433 243.71082 583.89185 416.34595 0.4765625
+ cup 68.025925 197.5857 202.02031 374.2206 0.4375
+ book 185.43098 139.64153 244.51149 203.37737 0.3125
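Each line above is one expected detection in the form "label left top right bottom confidence", with coordinates in the 640x480 test-frame space and underscores standing in for spaces in multi-word labels; loadRecognitions in DetectorTest.java below parses exactly this layout.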
android/app/src/androidTest/java/AndroidManifest.xml ADDED
@@ -0,0 +1,5 @@
+ <?xml version="1.0" encoding="utf-8"?>
+ <manifest xmlns:android="http://schemas.android.com/apk/res/android"
+     package="org.tensorflow.lite.examples.detection">
+     <uses-sdk />
+ </manifest>
android/app/src/androidTest/java/org/tensorflow/lite/examples/detection/DetectorTest.java ADDED
@@ -0,0 +1,165 @@
+ /*
+  * Copyright 2020 The TensorFlow Authors. All Rights Reserved.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+ package org.tensorflow.lite.examples.detection;
+
+ import static com.google.common.truth.Truth.assertThat;
+ import static java.lang.Math.abs;
+ import static java.lang.Math.max;
+ import static java.lang.Math.min;
+
+ import android.content.res.AssetManager;
+ import android.graphics.Bitmap;
+ import android.graphics.Bitmap.Config;
+ import android.graphics.BitmapFactory;
+ import android.graphics.Canvas;
+ import android.graphics.Matrix;
+ import android.graphics.RectF;
+ import android.util.Size;
+ import androidx.test.ext.junit.runners.AndroidJUnit4;
+ import androidx.test.platform.app.InstrumentationRegistry;
+ import java.io.IOException;
+ import java.io.InputStream;
+ import java.util.ArrayList;
+ import java.util.List;
+ import java.util.Scanner;
+ import org.junit.Before;
+ import org.junit.Test;
+ import org.junit.runner.RunWith;
+ import org.tensorflow.lite.examples.detection.env.ImageUtils;
+ import org.tensorflow.lite.examples.detection.tflite.Classifier;
+ import org.tensorflow.lite.examples.detection.tflite.Classifier.Recognition;
+ import org.tensorflow.lite.examples.detection.tflite.TFLiteObjectDetectionAPIModel;
+
+ /** Golden test for Object Detection Reference app. */
+ @RunWith(AndroidJUnit4.class)
+ public class DetectorTest {
+
+   private static final int MODEL_INPUT_SIZE = 300;
+   private static final boolean IS_MODEL_QUANTIZED = true;
+   private static final String MODEL_FILE = "detect.tflite";
+   private static final String LABELS_FILE = "file:///android_asset/labelmap.txt";
+   private static final Size IMAGE_SIZE = new Size(640, 480);
+
+   private Classifier detector;
+   private Bitmap croppedBitmap;
+   private Matrix frameToCropTransform;
+   private Matrix cropToFrameTransform;
+
+   @Before
+   public void setUp() throws IOException {
+     AssetManager assetManager =
+         InstrumentationRegistry.getInstrumentation().getContext().getAssets();
+     detector =
+         TFLiteObjectDetectionAPIModel.create(
+             assetManager,
+             MODEL_FILE,
+             LABELS_FILE,
+             MODEL_INPUT_SIZE,
+             IS_MODEL_QUANTIZED);
+     int cropSize = MODEL_INPUT_SIZE;
+     int previewWidth = IMAGE_SIZE.getWidth();
+     int previewHeight = IMAGE_SIZE.getHeight();
+     int sensorOrientation = 0;
+     croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
+
+     frameToCropTransform =
+         ImageUtils.getTransformationMatrix(
+             previewWidth, previewHeight,
+             cropSize, cropSize,
+             sensorOrientation, false);
+     cropToFrameTransform = new Matrix();
+     frameToCropTransform.invert(cropToFrameTransform);
+   }
+
+   @Test
+   public void detectionResultsShouldNotChange() throws Exception {
+     Canvas canvas = new Canvas(croppedBitmap);
+     canvas.drawBitmap(loadImage("table.jpg"), frameToCropTransform, null);
+     final List<Recognition> results = detector.recognizeImage(croppedBitmap);
+     final List<Recognition> expected = loadRecognitions("table_results.txt");
+
+     for (Recognition target : expected) {
+       // Find a matching result in results
+       boolean matched = false;
+       for (Recognition item : results) {
+         RectF bbox = new RectF();
+         cropToFrameTransform.mapRect(bbox, item.getLocation());
+         if (item.getTitle().equals(target.getTitle())
+             && matchBoundingBoxes(bbox, target.getLocation())
+             && matchConfidence(item.getConfidence(), target.getConfidence())) {
+           matched = true;
+           break;
+         }
+       }
+       assertThat(matched).isTrue();
+     }
+   }
+
+   // Confidence tolerance: absolute 1%
+   private static boolean matchConfidence(float a, float b) {
+     return abs(a - b) < 0.01;
+   }
+
+   // Bounding Box tolerance: overlapped area > 95% of each one
+   private static boolean matchBoundingBoxes(RectF a, RectF b) {
+     float areaA = a.width() * a.height();
+     float areaB = b.width() * b.height();
+     RectF overlapped =
+         new RectF(
+             max(a.left, b.left), max(a.top, b.top), min(a.right, b.right), min(a.bottom, b.bottom));
+     float overlappedArea = overlapped.width() * overlapped.height();
+     return overlappedArea > 0.95 * areaA && overlappedArea > 0.95 * areaB;
+   }
+
+   private static Bitmap loadImage(String fileName) throws Exception {
+     AssetManager assetManager =
+         InstrumentationRegistry.getInstrumentation().getContext().getAssets();
+     InputStream inputStream = assetManager.open(fileName);
+     return BitmapFactory.decodeStream(inputStream);
+   }
+
+   // The format of result:
+   // category bbox.left bbox.top bbox.right bbox.bottom confidence
+   // ...
+   // Example:
+   // Apple 99 25 30 75 80 0.99
+   // Banana 25 90 75 200 0.98
+   // ...
+   private static List<Recognition> loadRecognitions(String fileName) throws Exception {
+     AssetManager assetManager =
+         InstrumentationRegistry.getInstrumentation().getContext().getAssets();
+     InputStream inputStream = assetManager.open(fileName);
+     Scanner scanner = new Scanner(inputStream);
+     List<Recognition> result = new ArrayList<>();
+     while (scanner.hasNext()) {
+       String category = scanner.next();
+       category = category.replace('_', ' ');
+       if (!scanner.hasNextFloat()) {
+         break;
+       }
+       float left = scanner.nextFloat();
+       float top = scanner.nextFloat();
+       float right = scanner.nextFloat();
+       float bottom = scanner.nextFloat();
+       RectF boundingBox = new RectF(left, top, right, bottom);
+       float confidence = scanner.nextFloat();
+       Recognition recognition = new Recognition(null, category, confidence, boundingBox);
+       result.add(recognition);
+     }
+     return result;
+   }
+ }
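One corner case in the test above is worth flagging: matchBoundingBoxes multiplies the width and height of the raw intersection rectangle, and RectF reports negative width and height when the boxes are disjoint, so two negatives can multiply into a spurious positive "overlap". A defensive variant (a sketch, not part of this commit) lets RectF.setIntersect report disjoint boxes explicitly:

import android.graphics.RectF;

// Bounding box tolerance: overlapped area > 95% of each box,
// returning false outright when the boxes do not intersect.
static boolean matchBoundingBoxesSafely(RectF a, RectF b) {
  RectF overlap = new RectF();
  if (!overlap.setIntersect(a, b)) {
    return false; // disjoint boxes: avoids the negative-width corner case
  }
  float overlappedArea = overlap.width() * overlap.height();
  return overlappedArea > 0.95f * a.width() * a.height()
      && overlappedArea > 0.95f * b.width() * b.height();
}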
android/app/src/main/AndroidManifest.xml ADDED
@@ -0,0 +1,35 @@
+ <manifest xmlns:android="http://schemas.android.com/apk/res/android"
+     package="org.tensorflow.lite.examples.detection">
+
+     <uses-sdk />
+
+     <uses-permission android:name="android.permission.CAMERA" />
+
+     <uses-feature android:name="android.hardware.camera" />
+     <uses-feature android:name="android.hardware.camera.autofocus" />
+     <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
+     <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
+
+     <application
+         android:allowBackup="false"
+         android:icon="@mipmap/ic_launcher"
+         android:label="@string/tfe_od_app_name"
+         android:roundIcon="@mipmap/ic_launcher_round"
+         android:supportsRtl="true"
+         android:theme="@style/AppTheme.ObjectDetection">
+
+         <activity
+             android:name=".DetectorActivity"
+             android:label="@string/tfe_od_app_name"
+             android:screenOrientation="portrait">
+         </activity>
+
+         <activity android:name=".MainActivity">
+             <intent-filter>
+                 <action android:name="android.intent.action.MAIN" />
+                 <category android:name="android.intent.category.LAUNCHER" />
+             </intent-filter>
+         </activity>
+
+     </application>
+ </manifest>
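In the manifest above, MainActivity carries the MAIN/LAUNCHER intent filter and is therefore the entry point, with the portrait-locked DetectorActivity declared alongside it; the external-storage permissions presumably support loading still images from disk rather than the camera path.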
android/app/src/main/assets/coco.txt ADDED
@@ -0,0 +1,80 @@
+ person
+ bicycle
+ car
+ motorbike
+ aeroplane
+ bus
+ train
+ truck
+ boat
+ traffic light
+ fire hydrant
+ stop sign
+ parking meter
+ bench
+ bird
+ cat
+ dog
+ horse
+ sheep
+ cow
+ elephant
+ bear
+ zebra
+ giraffe
+ backpack
+ umbrella
+ handbag
+ tie
+ suitcase
+ frisbee
+ skis
+ snowboard
+ sports ball
+ kite
+ baseball bat
+ baseball glove
+ skateboard
+ surfboard
+ tennis racket
+ bottle
+ wine glass
+ cup
+ fork
+ knife
+ spoon
+ bowl
+ banana
+ apple
+ sandwich
+ orange
+ broccoli
+ carrot
+ hot dog
+ pizza
+ donut
+ cake
+ chair
+ sofa
+ potted plant
+ bed
+ dining table
+ toilet
+ tvmonitor
+ laptop
+ mouse
+ remote
+ keyboard
+ cell phone
+ microwave
+ oven
+ toaster
+ sink
+ refrigerator
+ book
+ clock
+ vase
+ scissors
+ teddy bear
+ hair drier
+ toothbrush
android/app/src/main/assets/kite.jpg ADDED
android/app/src/main/assets/labelmap.txt ADDED
@@ -0,0 +1,91 @@
+ ???
+ person
+ bicycle
+ car
+ motorcycle
+ airplane
+ bus
+ train
+ truck
+ boat
+ traffic light
+ fire hydrant
+ ???
+ stop sign
+ parking meter
+ bench
+ bird
+ cat
+ dog
+ horse
+ sheep
+ cow
+ elephant
+ bear
+ zebra
+ giraffe
+ ???
+ backpack
+ umbrella
+ ???
+ ???
+ handbag
+ tie
+ suitcase
+ frisbee
+ skis
+ snowboard
+ sports ball
+ kite
+ baseball bat
+ baseball glove
+ skateboard
+ surfboard
+ tennis racket
+ bottle
+ ???
+ wine glass
+ cup
+ fork
+ knife
+ spoon
+ bowl
+ banana
+ apple
+ sandwich
+ orange
+ broccoli
+ carrot
+ hot dog
+ pizza
+ donut
+ cake
+ chair
+ couch
+ potted plant
+ bed
+ ???
+ dining table
+ ???
+ ???
+ toilet
+ ???
+ tv
+ laptop
+ mouse
+ remote
+ keyboard
+ cell phone
+ microwave
+ oven
+ toaster
+ sink
+ refrigerator
+ ???
+ book
+ clock
+ vase
+ scissors
+ teddy bear
+ hair drier
+ toothbrush
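Two label files ship side by side: coco.txt is the dense 80-class YOLO label list (one label per class index), while labelmap.txt is the 91-slot COCO label map used by SSD-style models such as the one DetectorTest loads, with ??? marking indices that have no class assigned. A loader should keep the placeholder rows in place so class id k still lands on row k; a minimal sketch (the Labels class name is illustrative):

import android.content.res.AssetManager;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;

final class Labels {
  // Reads one label per line, keeping '???' placeholders so that the
  // model's class index k still maps to element k of the returned list.
  static List<String> load(AssetManager assets, String fileName) throws IOException {
    List<String> labels = new ArrayList<>();
    try (BufferedReader reader =
        new BufferedReader(new InputStreamReader(assets.open(fileName)))) {
      String line;
      while ((line = reader.readLine()) != null) {
        labels.add(line);
      }
    }
    return labels;
  }
}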
android/app/src/main/assets/yolov4-416-fp32.tflite ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7160a2f3e58629a15506a6c77685fb5583cddf186dac3015be7998975d662465
+ size 24279948
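The three lines above are a Git LFS pointer, not the model itself: the repository records only the sha256 oid and the 24,279,948-byte size, and the actual yolov4-416-fp32.tflite blob is fetched from LFS storage at checkout.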
android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraActivity.java ADDED
@@ -0,0 +1,550 @@
+ /*
+  * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  *     http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+ package org.tensorflow.lite.examples.detection;
+
+ import android.Manifest;
+ import android.app.Fragment;
+ import android.content.Context;
+ import android.content.pm.PackageManager;
+ import android.hardware.Camera;
+ import android.hardware.camera2.CameraAccessException;
+ import android.hardware.camera2.CameraCharacteristics;
+ import android.hardware.camera2.CameraManager;
+ import android.hardware.camera2.params.StreamConfigurationMap;
+ import android.media.Image;
+ import android.media.Image.Plane;
+ import android.media.ImageReader;
+ import android.media.ImageReader.OnImageAvailableListener;
+ import android.os.Build;
+ import android.os.Bundle;
+ import android.os.Handler;
+ import android.os.HandlerThread;
+ import android.os.Trace;
+ import androidx.annotation.NonNull;
+ import androidx.appcompat.app.AppCompatActivity;
+ import androidx.appcompat.widget.SwitchCompat;
+ import androidx.appcompat.widget.Toolbar;
+ import android.util.Size;
+ import android.view.Surface;
+ import android.view.View;
+ import android.view.ViewTreeObserver;
+ import android.view.WindowManager;
+ import android.widget.CompoundButton;
+ import android.widget.ImageView;
+ import android.widget.LinearLayout;
+ import android.widget.TextView;
+ import android.widget.Toast;
+ import com.google.android.material.bottomsheet.BottomSheetBehavior;
+ import java.nio.ByteBuffer;
+ import org.tensorflow.lite.examples.detection.env.ImageUtils;
+ import org.tensorflow.lite.examples.detection.env.Logger;
+
+ public abstract class CameraActivity extends AppCompatActivity
+     implements OnImageAvailableListener,
+         Camera.PreviewCallback,
+         CompoundButton.OnCheckedChangeListener,
+         View.OnClickListener {
+   private static final Logger LOGGER = new Logger();
+
+   private static final int PERMISSIONS_REQUEST = 1;
+
+   private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;
+   protected int previewWidth = 0;
+   protected int previewHeight = 0;
+   private boolean debug = false;
+   private Handler handler;
+   private HandlerThread handlerThread;
+   private boolean useCamera2API;
+   private boolean isProcessingFrame = false;
+   private byte[][] yuvBytes = new byte[3][];
+   private int[] rgbBytes = null;
+   private int yRowStride;
+   private Runnable postInferenceCallback;
+   private Runnable imageConverter;
+
+   private LinearLayout bottomSheetLayout;
+   private LinearLayout gestureLayout;
+   private BottomSheetBehavior<LinearLayout> sheetBehavior;
+
+   protected TextView frameValueTextView, cropValueTextView, inferenceTimeTextView;
+   protected ImageView bottomSheetArrowImageView;
+   private ImageView plusImageView, minusImageView;
+   private SwitchCompat apiSwitchCompat;
+   private TextView threadsTextView;
+
+   @Override
+   protected void onCreate(final Bundle savedInstanceState) {
+     LOGGER.d("onCreate " + this);
+     super.onCreate(null);
+     getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
+
+     setContentView(R.layout.tfe_od_activity_camera);
+     Toolbar toolbar = findViewById(R.id.toolbar);
+     setSupportActionBar(toolbar);
+     getSupportActionBar().setDisplayShowTitleEnabled(false);
+
+     if (hasPermission()) {
+       setFragment();
+     } else {
+       requestPermission();
+     }
+
+     threadsTextView = findViewById(R.id.threads);
+     plusImageView = findViewById(R.id.plus);
+     minusImageView = findViewById(R.id.minus);
+     apiSwitchCompat = findViewById(R.id.api_info_switch);
+     bottomSheetLayout = findViewById(R.id.bottom_sheet_layout);
+     gestureLayout = findViewById(R.id.gesture_layout);
+     sheetBehavior = BottomSheetBehavior.from(bottomSheetLayout);
+     bottomSheetArrowImageView = findViewById(R.id.bottom_sheet_arrow);
+
+     ViewTreeObserver vto = gestureLayout.getViewTreeObserver();
+     vto.addOnGlobalLayoutListener(
+         new ViewTreeObserver.OnGlobalLayoutListener() {
+           @Override
+           public void onGlobalLayout() {
+             if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
+               gestureLayout.getViewTreeObserver().removeGlobalOnLayoutListener(this);
+             } else {
+               gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this);
+             }
+             // int width = bottomSheetLayout.getMeasuredWidth();
+             int height = gestureLayout.getMeasuredHeight();
+
+             sheetBehavior.setPeekHeight(height);
+           }
+         });
+     sheetBehavior.setHideable(false);
+
+     sheetBehavior.setBottomSheetCallback(
+         new BottomSheetBehavior.BottomSheetCallback() {
+           @Override
+           public void onStateChanged(@NonNull View bottomSheet, int newState) {
+             switch (newState) {
+               case BottomSheetBehavior.STATE_HIDDEN:
+                 break;
+               case BottomSheetBehavior.STATE_EXPANDED:
+                 {
+                   bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_down);
+                 }
+                 break;
+               case BottomSheetBehavior.STATE_COLLAPSED:
+                 {
+                   bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
+                 }
+                 break;
+               case BottomSheetBehavior.STATE_DRAGGING:
+                 break;
+               case BottomSheetBehavior.STATE_SETTLING:
+                 bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
+                 break;
+             }
+           }
+
+           @Override
+           public void onSlide(@NonNull View bottomSheet, float slideOffset) {}
+         });
+
+     frameValueTextView = findViewById(R.id.frame_info);
+     cropValueTextView = findViewById(R.id.crop_info);
+     inferenceTimeTextView = findViewById(R.id.inference_info);
+
+     apiSwitchCompat.setOnCheckedChangeListener(this);
+
+     plusImageView.setOnClickListener(this);
+     minusImageView.setOnClickListener(this);
+   }
+
+   protected int[] getRgbBytes() {
+     imageConverter.run();
+     return rgbBytes;
+   }
+
+   protected int getLuminanceStride() {
+     return yRowStride;
+   }
+
+   protected byte[] getLuminance() {
+     return yuvBytes[0];
+   }
+
+   /** Callback for android.hardware.Camera API */
+   @Override
+   public void onPreviewFrame(final byte[] bytes, final Camera camera) {
+     if (isProcessingFrame) {
+       LOGGER.w("Dropping frame!");
+       return;
+     }
+
+     try {
+       // Initialize the storage bitmaps once when the resolution is known.
+       if (rgbBytes == null) {
+         Camera.Size previewSize = camera.getParameters().getPreviewSize();
+         previewHeight = previewSize.height;
+         previewWidth = previewSize.width;
+         rgbBytes = new int[previewWidth * previewHeight];
+         onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90);
+       }
+     } catch (final Exception e) {
+       LOGGER.e(e, "Exception!");
+       return;
+     }
+
+     isProcessingFrame = true;
+     yuvBytes[0] = bytes;
+     yRowStride = previewWidth;
+
+     imageConverter =
+         new Runnable() {
+           @Override
+           public void run() {
+             ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
+           }
+         };
+
+     postInferenceCallback =
+         new Runnable() {
+           @Override
+           public void run() {
+             camera.addCallbackBuffer(bytes);
+             isProcessingFrame = false;
+           }
+         };
+     processImage();
+   }
+
+   /** Callback for Camera2 API */
+   @Override
+   public void onImageAvailable(final ImageReader reader) {
+     // We need wait until we have some size from onPreviewSizeChosen
+     if (previewWidth == 0 || previewHeight == 0) {
+       return;
+     }
+     if (rgbBytes == null) {
+       rgbBytes = new int[previewWidth * previewHeight];
+     }
+     try {
+       final Image image = reader.acquireLatestImage();
+
+       if (image == null) {
+         return;
+       }
+
+       if (isProcessingFrame) {
+         image.close();
+         return;
+       }
+       isProcessingFrame = true;
+       Trace.beginSection("imageAvailable");
+       final Plane[] planes = image.getPlanes();
+       fillBytes(planes, yuvBytes);
+       yRowStride = planes[0].getRowStride();
+       final int uvRowStride = planes[1].getRowStride();
+       final int uvPixelStride = planes[1].getPixelStride();
+
+       imageConverter =
+           new Runnable() {
+             @Override
+             public void run() {
+               ImageUtils.convertYUV420ToARGB8888(
+                   yuvBytes[0],
+                   yuvBytes[1],
+                   yuvBytes[2],
+                   previewWidth,
+                   previewHeight,
+                   yRowStride,
+                   uvRowStride,
+                   uvPixelStride,
+                   rgbBytes);
+             }
+           };
+
+       postInferenceCallback =
+           new Runnable() {
+             @Override
+             public void run() {
+               image.close();
+               isProcessingFrame = false;
+             }
+           };
+
+       processImage();
+     } catch (final Exception e) {
+       LOGGER.e(e, "Exception!");
+       Trace.endSection();
+       return;
+     }
+     Trace.endSection();
+   }
+
+   @Override
+   public synchronized void onStart() {
+     LOGGER.d("onStart " + this);
+     super.onStart();
+   }
+
+   @Override
+   public synchronized void onResume() {
+     LOGGER.d("onResume " + this);
+     super.onResume();
+
+     handlerThread = new HandlerThread("inference");
+     handlerThread.start();
+     handler = new Handler(handlerThread.getLooper());
+   }
+
+   @Override
+   public synchronized void onPause() {
+     LOGGER.d("onPause " + this);
+
+     handlerThread.quitSafely();
+     try {
+       handlerThread.join();
+       handlerThread = null;
+       handler = null;
+     } catch (final InterruptedException e) {
+       LOGGER.e(e, "Exception!");
+     }
+
+     super.onPause();
+   }
+
+   @Override
+   public synchronized void onStop() {
+     LOGGER.d("onStop " + this);
+     super.onStop();
+   }
+
+   @Override
+   public synchronized void onDestroy() {
+     LOGGER.d("onDestroy " + this);
+     super.onDestroy();
+   }
+
+   protected synchronized void runInBackground(final Runnable r) {
+     if (handler != null) {
+       handler.post(r);
+     }
+   }
+
+   @Override
+   public void onRequestPermissionsResult(
+       final int requestCode, final String[] permissions, final int[] grantResults) {
+     super.onRequestPermissionsResult(requestCode, permissions, grantResults);
+     if (requestCode == PERMISSIONS_REQUEST) {
+       if (allPermissionsGranted(grantResults)) {
+         setFragment();
+       } else {
+         requestPermission();
+       }
+     }
+   }
+
+   private static boolean allPermissionsGranted(final int[] grantResults) {
+     for (int result : grantResults) {
+       if (result != PackageManager.PERMISSION_GRANTED) {
+         return false;
+       }
+     }
+     return true;
+   }
+
+   private boolean hasPermission() {
+     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+       return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED;
+     } else {
+       return true;
+     }
+   }
+
+   private void requestPermission() {
+     if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
+       if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) {
+         Toast.makeText(
+                 CameraActivity.this,
+                 "Camera permission is required for this demo",
+                 Toast.LENGTH_LONG)
+             .show();
+       }
+       requestPermissions(new String[] {PERMISSION_CAMERA}, PERMISSIONS_REQUEST);
+     }
+   }
+
+   // Returns true if the device supports the required hardware level, or better.
+   private boolean isHardwareLevelSupported(
+       CameraCharacteristics characteristics, int requiredLevel) {
+     int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
+     if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
+       return requiredLevel == deviceLevel;
+     }
+     // deviceLevel is not LEGACY, can use numerical sort
+     return requiredLevel <= deviceLevel;
+   }
+
+   private String chooseCamera() {
+     final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
+     try {
+       for (final String cameraId : manager.getCameraIdList()) {
+         final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
+
+         // We don't use a front facing camera in this sample.
+         final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
+         if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
+           continue;
+         }
+
+         final StreamConfigurationMap map =
+             characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+
+         if (map == null) {
+           continue;
+         }
+
+         // Fallback to camera1 API for internal cameras that don't have full support.
+         // This should help with legacy situations where using the camera2 API causes
+         // distorted or otherwise broken previews.
+         useCamera2API =
+             (facing == CameraCharacteristics.LENS_FACING_EXTERNAL)
+                 || isHardwareLevelSupported(
+                     characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
+         LOGGER.i("Camera API lv2?: %s", useCamera2API);
+         return cameraId;
+       }
+     } catch (CameraAccessException e) {
+       LOGGER.e(e, "Not allowed to access camera");
+     }
+
+     return null;
+   }
+
+   protected void setFragment() {
+     String cameraId = chooseCamera();
+
+     Fragment fragment;
+     if (useCamera2API) {
+       CameraConnectionFragment camera2Fragment =
+           CameraConnectionFragment.newInstance(
+               new CameraConnectionFragment.ConnectionCallback() {
+                 @Override
+                 public void onPreviewSizeChosen(final Size size, final int rotation) {
+                   previewHeight = size.getHeight();
+                   previewWidth = size.getWidth();
+                   CameraActivity.this.onPreviewSizeChosen(size, rotation);
+                 }
+               },
+               this,
+               getLayoutId(),
+               getDesiredPreviewFrameSize());
+
+       camera2Fragment.setCamera(cameraId);
+       fragment = camera2Fragment;
+     } else {
+       fragment =
+           new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
+     }
+
+     getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit();
+   }
+
+   protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
+     // Because of the variable row stride it's not possible to know in
+     // advance the actual necessary dimensions of the yuv planes.
+     for (int i = 0; i < planes.length; ++i) {
+       final ByteBuffer buffer = planes[i].getBuffer();
+       if (yuvBytes[i] == null) {
+         LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
+         yuvBytes[i] = new byte[buffer.capacity()];
+       }
+       buffer.get(yuvBytes[i]);
+     }
+   }
+
+   public boolean isDebug() {
+     return debug;
+   }
+
+   protected void readyForNextImage() {
+     if (postInferenceCallback != null) {
+       postInferenceCallback.run();
+     }
+   }
+
+   protected int getScreenOrientation() {
+     switch (getWindowManager().getDefaultDisplay().getRotation()) {
+       case Surface.ROTATION_270:
+         return 270;
+       case Surface.ROTATION_180:
+         return 180;
+       case Surface.ROTATION_90:
+         return 90;
+       default:
+         return 0;
+     }
+   }
+
+   @Override
+   public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
+     setUseNNAPI(isChecked);
+     if (isChecked) apiSwitchCompat.setText("NNAPI");
+     else apiSwitchCompat.setText("TFLITE");
+   }
+
+   @Override
+   public void onClick(View v) {
+     if (v.getId() == R.id.plus) {
+       String threads = threadsTextView.getText().toString().trim();
+       int numThreads = Integer.parseInt(threads);
+       if (numThreads >= 9) return;
+       numThreads++;
+       threadsTextView.setText(String.valueOf(numThreads));
+       setNumThreads(numThreads);
+     } else if (v.getId() == R.id.minus) {
+       String threads = threadsTextView.getText().toString().trim();
+       int numThreads = Integer.parseInt(threads);
+       if (numThreads == 1) {
+         return;
+       }
+       numThreads--;
+       threadsTextView.setText(String.valueOf(numThreads));
+       setNumThreads(numThreads);
+     }
+   }
+
+   protected void showFrameInfo(String frameInfo) {
+     frameValueTextView.setText(frameInfo);
+   }
+
+   protected void showCropInfo(String cropInfo) {
+     cropValueTextView.setText(cropInfo);
+   }
+
+   protected void showInference(String inferenceTime) {
+     inferenceTimeTextView.setText(inferenceTime);
+   }
+
+   protected abstract void processImage();
+
+   protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
+
+   protected abstract int getLayoutId();
+
+   protected abstract Size getDesiredPreviewFrameSize();
+
+   protected abstract void setNumThreads(int numThreads);
+
+   protected abstract void setUseNNAPI(boolean isChecked);
+ }
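CameraActivity allows only one frame in flight: both camera callbacks bail out while isProcessingFrame is set, capture the pixel conversion in the imageConverter runnable, and stash the buffer/Image release in postInferenceCallback. A subclass's processImage() is expected to follow that contract; a minimal sketch of the shape (DetectorActivity in this commit's file list is the real implementation):

@Override
protected void processImage() {
  final int[] rgb = getRgbBytes(); // runs imageConverter for this frame
  runInBackground(
      new Runnable() {
        @Override
        public void run() {
          // ... run detection on the rgb pixels ...
          readyForNextImage(); // runs postInferenceCallback: releases the
                               // frame and clears isProcessingFrame
        }
      });
}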
android/app/src/main/java/org/tensorflow/lite/examples/detection/CameraConnectionFragment.java ADDED
@@ -0,0 +1,569 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /*
2
+ * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ package org.tensorflow.lite.examples.detection;
18
+
19
+ import android.annotation.SuppressLint;
20
+ import android.app.Activity;
21
+ import android.app.AlertDialog;
22
+ import android.app.Dialog;
23
+ import android.app.DialogFragment;
24
+ import android.app.Fragment;
25
+ import android.content.Context;
26
+ import android.content.DialogInterface;
27
+ import android.content.res.Configuration;
28
+ import android.graphics.ImageFormat;
29
+ import android.graphics.Matrix;
30
+ import android.graphics.RectF;
31
+ import android.graphics.SurfaceTexture;
32
+ import android.hardware.camera2.CameraAccessException;
33
+ import android.hardware.camera2.CameraCaptureSession;
34
+ import android.hardware.camera2.CameraCharacteristics;
35
+ import android.hardware.camera2.CameraDevice;
36
+ import android.hardware.camera2.CameraManager;
37
+ import android.hardware.camera2.CaptureRequest;
38
+ import android.hardware.camera2.CaptureResult;
39
+ import android.hardware.camera2.TotalCaptureResult;
40
+ import android.hardware.camera2.params.StreamConfigurationMap;
41
+ import android.media.ImageReader;
42
+ import android.media.ImageReader.OnImageAvailableListener;
43
+ import android.os.Bundle;
44
+ import android.os.Handler;
45
+ import android.os.HandlerThread;
46
+ import android.text.TextUtils;
47
+ import android.util.Size;
48
+ import android.util.SparseIntArray;
49
+ import android.view.LayoutInflater;
50
+ import android.view.Surface;
51
+ import android.view.TextureView;
52
+ import android.view.View;
53
+ import android.view.ViewGroup;
54
+ import android.widget.Toast;
55
+ import java.util.ArrayList;
56
+ import java.util.Arrays;
57
+ import java.util.Collections;
58
+ import java.util.Comparator;
59
+ import java.util.List;
60
+ import java.util.concurrent.Semaphore;
61
+ import java.util.concurrent.TimeUnit;
62
+ import org.tensorflow.lite.examples.detection.customview.AutoFitTextureView;
63
+ import org.tensorflow.lite.examples.detection.env.Logger;
64
+
65
+ @SuppressLint("ValidFragment")
66
+ public class CameraConnectionFragment extends Fragment {
67
+ private static final Logger LOGGER = new Logger();
68
+
69
+ /**
70
+ * The camera preview size will be chosen to be the smallest frame by pixel size capable of
71
+ * containing a DESIRED_SIZE x DESIRED_SIZE square.
72
+ */
73
+ private static final int MINIMUM_PREVIEW_SIZE = 320;
74
+
75
+ /** Conversion from screen rotation to JPEG orientation. */
76
+ private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
77
+
78
+ private static final String FRAGMENT_DIALOG = "dialog";
79
+
80
+ static {
81
+ ORIENTATIONS.append(Surface.ROTATION_0, 90);
82
+ ORIENTATIONS.append(Surface.ROTATION_90, 0);
83
+ ORIENTATIONS.append(Surface.ROTATION_180, 270);
84
+ ORIENTATIONS.append(Surface.ROTATION_270, 180);
85
+ }
86
+
87
+ /** A {@link Semaphore} to prevent the app from exiting before closing the camera. */
88
+ private final Semaphore cameraOpenCloseLock = new Semaphore(1);
89
+ /** A {@link OnImageAvailableListener} to receive frames as they are available. */
90
+ private final OnImageAvailableListener imageListener;
91
+ /** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */
92
+ private final Size inputSize;
93
+ /** The layout identifier to inflate for this Fragment. */
94
+ private final int layout;
95
+
96
+ private final ConnectionCallback cameraConnectionCallback;
97
+ private final CameraCaptureSession.CaptureCallback captureCallback =
98
+ new CameraCaptureSession.CaptureCallback() {
99
+ @Override
100
+ public void onCaptureProgressed(
101
+ final CameraCaptureSession session,
102
+ final CaptureRequest request,
103
+ final CaptureResult partialResult) {}
104
+
105
+ @Override
106
+ public void onCaptureCompleted(
107
+ final CameraCaptureSession session,
108
+ final CaptureRequest request,
109
+ final TotalCaptureResult result) {}
110
+ };
111
+ /** ID of the current {@link CameraDevice}. */
112
+ private String cameraId;
113
+ /** An {@link AutoFitTextureView} for camera preview. */
114
+ private AutoFitTextureView textureView;
115
+ /** A {@link CameraCaptureSession } for camera preview. */
116
+ private CameraCaptureSession captureSession;
117
+ /** A reference to the opened {@link CameraDevice}. */
118
+ private CameraDevice cameraDevice;
119
+ /** The rotation in degrees of the camera sensor from the display. */
120
+ private Integer sensorOrientation;
121
+ /** The {@link Size} of camera preview. */
122
+ private Size previewSize;
123
+ /** An additional thread for running tasks that shouldn't block the UI. */
124
+ private HandlerThread backgroundThread;
125
+ /** A {@link Handler} for running tasks in the background. */
126
+ private Handler backgroundHandler;
127
+ /** An {@link ImageReader} that handles preview frame capture. */
128
+ private ImageReader previewReader;
129
+ /** {@link CaptureRequest.Builder} for the camera preview */
130
+ private CaptureRequest.Builder previewRequestBuilder;
131
+ /** {@link CaptureRequest} generated by {@link #previewRequestBuilder} */
132
+ private CaptureRequest previewRequest;
133
+ /** {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state. */
134
+ private final CameraDevice.StateCallback stateCallback =
135
+ new CameraDevice.StateCallback() {
136
+ @Override
137
+ public void onOpened(final CameraDevice cd) {
138
+ // This method is called when the camera is opened. We start camera preview here.
139
+ cameraOpenCloseLock.release();
140
+ cameraDevice = cd;
141
+ createCameraPreviewSession();
142
+ }
143
+
144
+ @Override
145
+ public void onDisconnected(final CameraDevice cd) {
146
+ cameraOpenCloseLock.release();
147
+ cd.close();
148
+ cameraDevice = null;
149
+ }
150
+
151
+ @Override
152
+ public void onError(final CameraDevice cd, final int error) {
153
+ cameraOpenCloseLock.release();
154
+ cd.close();
155
+ cameraDevice = null;
156
+ final Activity activity = getActivity();
157
+ if (null != activity) {
158
+ activity.finish();
159
+ }
160
+ }
161
+ };
162
+ /**
163
+ * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
164
+ * TextureView}.
165
+ */
166
+ private final TextureView.SurfaceTextureListener surfaceTextureListener =
167
+ new TextureView.SurfaceTextureListener() {
168
+ @Override
169
+ public void onSurfaceTextureAvailable(
170
+ final SurfaceTexture texture, final int width, final int height) {
171
+ openCamera(width, height);
172
+ }
173
+
174
+ @Override
175
+ public void onSurfaceTextureSizeChanged(
176
+ final SurfaceTexture texture, final int width, final int height) {
177
+ configureTransform(width, height);
178
+ }
179
+
180
+ @Override
181
+ public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
182
+ return true;
183
+ }
184
+
185
+ @Override
186
+ public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
187
+ };
188
+
189
+ private CameraConnectionFragment(
190
+ final ConnectionCallback connectionCallback,
191
+ final OnImageAvailableListener imageListener,
192
+ final int layout,
193
+ final Size inputSize) {
194
+ this.cameraConnectionCallback = connectionCallback;
195
+ this.imageListener = imageListener;
196
+ this.layout = layout;
197
+ this.inputSize = inputSize;
198
+ }
199
+
200
+ /**
201
+ * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
202
+ * width and height are at least as large as the minimum of both, or an exact match if possible.
203
+ *
204
+ * @param choices The list of sizes that the camera supports for the intended output class
205
+ * @param width The minimum desired width
206
+ * @param height The minimum desired height
207
+ * @return The optimal {@code Size}, or an arbitrary one if none were big enough
208
+ */
209
+ protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
210
+ final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
211
+ final Size desiredSize = new Size(width, height);
212
+
213
+ // Collect the supported resolutions that are at least as big as the preview Surface
214
+ boolean exactSizeFound = false;
215
+ final List<Size> bigEnough = new ArrayList<Size>();
216
+ final List<Size> tooSmall = new ArrayList<Size>();
217
+ for (final Size option : choices) {
218
+ if (option.equals(desiredSize)) {
219
+ // Set the size but don't return yet so that remaining sizes will still be logged.
220
+ exactSizeFound = true;
221
+ }
222
+
223
+ if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
224
+ bigEnough.add(option);
225
+ } else {
226
+ tooSmall.add(option);
227
+ }
228
+ }
229
+
230
+ LOGGER.i("Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
231
+ LOGGER.i("Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
232
+ LOGGER.i("Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");
233
+
234
+ if (exactSizeFound) {
235
+ LOGGER.i("Exact size match found.");
236
+ return desiredSize;
237
+ }
238
+
239
+ // Pick the smallest of those, assuming we found any
240
+ if (bigEnough.size() > 0) {
241
+ final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
242
+ LOGGER.i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
243
+ return chosenSize;
244
+ } else {
245
+ LOGGER.e("Couldn't find any suitable preview size");
246
+ return choices[0];
247
+ }
248
+ }
249
+
250
+ public static CameraConnectionFragment newInstance(
251
+ final ConnectionCallback callback,
252
+ final OnImageAvailableListener imageListener,
253
+ final int layout,
254
+ final Size inputSize) {
255
+ return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
256
+ }
257
+
258
+ /**
259
+ * Shows a {@link Toast} on the UI thread.
260
+ *
261
+ * @param text The message to show
262
+ */
263
+ private void showToast(final String text) {
264
+ final Activity activity = getActivity();
265
+ if (activity != null) {
266
+ activity.runOnUiThread(
267
+ new Runnable() {
268
+ @Override
269
+ public void run() {
270
+ Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
271
+ }
272
+ });
273
+ }
274
+ }
275
+
276
+ @Override
277
+ public View onCreateView(
278
+ final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
279
+ return inflater.inflate(layout, container, false);
280
+ }
281
+
282
+ @Override
283
+ public void onViewCreated(final View view, final Bundle savedInstanceState) {
284
+ textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
285
+ }
286
+
287
+ @Override
288
+ public void onActivityCreated(final Bundle savedInstanceState) {
289
+ super.onActivityCreated(savedInstanceState);
290
+ }
291
+
292
+ @Override
293
+ public void onResume() {
294
+ super.onResume();
295
+ startBackgroundThread();
296
+
297
+ // When the screen is turned off and turned back on, the SurfaceTexture is already
298
+ // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
299
+ // a camera and start preview from here (otherwise, we wait until the surface is ready in
300
+ // the SurfaceTextureListener).
301
+ if (textureView.isAvailable()) {
302
+ openCamera(textureView.getWidth(), textureView.getHeight());
303
+ } else {
304
+ textureView.setSurfaceTextureListener(surfaceTextureListener);
305
+ }
306
+ }
307
+
308
+ @Override
309
+ public void onPause() {
310
+ closeCamera();
311
+ stopBackgroundThread();
312
+ super.onPause();
313
+ }
314
+
315
+ public void setCamera(String cameraId) {
316
+ this.cameraId = cameraId;
317
+ }
318
+
319
+ /** Sets up member variables related to camera. */
320
+ private void setUpCameraOutputs() {
321
+ final Activity activity = getActivity();
322
+ final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
323
+ try {
324
+ final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
325
+
326
+ final StreamConfigurationMap map =
327
+ characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
328
+
329
+ sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
330
+
331
+ // Danger, W.R.! Attempting to use too large a preview size could exceed the camera
332
+ // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
333
+ // garbage capture data.
334
+ previewSize =
335
+ chooseOptimalSize(
336
+ map.getOutputSizes(SurfaceTexture.class),
337
+ inputSize.getWidth(),
338
+ inputSize.getHeight());
339
+
340
+ // We fit the aspect ratio of TextureView to the size of preview we picked.
341
+ final int orientation = getResources().getConfiguration().orientation;
342
+ if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
343
+ textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
344
+ } else {
345
+ textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
346
+ }
347
+ } catch (final CameraAccessException e) {
348
+ LOGGER.e(e, "Exception!");
349
+ } catch (final NullPointerException e) {
350
+ // Currently an NPE is thrown when the Camera2API is used but not supported on the
351
+ // device this code runs.
352
+ ErrorDialog.newInstance(getString(R.string.tfe_od_camera_error))
353
+ .show(getChildFragmentManager(), FRAGMENT_DIALOG);
354
+ throw new IllegalStateException(getString(R.string.tfe_od_camera_error));
355
+ }
356
+
357
+ cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation);
358
+ }
359
+
360
+ /** Opens the camera specified by {@link CameraConnectionFragment#cameraId}. */
361
+ private void openCamera(final int width, final int height) {
362
+ setUpCameraOutputs();
363
+ configureTransform(width, height);
364
+ final Activity activity = getActivity();
365
+ final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
366
+ try {
367
+ if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
368
+ throw new RuntimeException("Time out waiting to lock camera opening.");
369
+ }
370
+ manager.openCamera(cameraId, stateCallback, backgroundHandler);
371
+ } catch (final CameraAccessException e) {
372
+ LOGGER.e(e, "Exception!");
373
+ } catch (final InterruptedException e) {
374
+ throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
375
+ }
376
+ }
377
+
378
+ /** Closes the current {@link CameraDevice}. */
379
+ private void closeCamera() {
380
+ try {
381
+ cameraOpenCloseLock.acquire();
382
+ if (null != captureSession) {
383
+ captureSession.close();
384
+ captureSession = null;
385
+ }
386
+ if (null != cameraDevice) {
387
+ cameraDevice.close();
388
+ cameraDevice = null;
389
+ }
390
+ if (null != previewReader) {
391
+ previewReader.close();
392
+ previewReader = null;
393
+ }
394
+ } catch (final InterruptedException e) {
395
+ throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
396
+ } finally {
397
+ cameraOpenCloseLock.release();
398
+ }
399
+ }
400
+
401
+ /** Starts a background thread and its {@link Handler}. */
402
+ private void startBackgroundThread() {
403
+ backgroundThread = new HandlerThread("ImageListener");
404
+ backgroundThread.start();
405
+ backgroundHandler = new Handler(backgroundThread.getLooper());
406
+ }
407
+
408
+ /** Stops the background thread and its {@link Handler}. */
409
+ private void stopBackgroundThread() {
410
+ backgroundThread.quitSafely();
411
+ try {
412
+ backgroundThread.join();
413
+ backgroundThread = null;
414
+ backgroundHandler = null;
415
+ } catch (final InterruptedException e) {
416
+ LOGGER.e(e, "Exception!");
417
+ }
418
+ }
419
+
420
+ /** Creates a new {@link CameraCaptureSession} for camera preview. */
421
+ private void createCameraPreviewSession() {
422
+ try {
423
+ final SurfaceTexture texture = textureView.getSurfaceTexture();
424
+ assert texture != null;
425
+
426
+ // We configure the size of default buffer to be the size of camera preview we want.
427
+ texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
428
+
429
+ // This is the output Surface we need to start preview.
430
+ final Surface surface = new Surface(texture);
431
+
432
+ // We set up a CaptureRequest.Builder with the output Surface.
433
+ previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
434
+ previewRequestBuilder.addTarget(surface);
435
+
436
+ LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());
437
+
438
+ // Create the reader for the preview frames.
439
+ previewReader =
440
+ ImageReader.newInstance(
441
+ previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
442
+
443
+ previewReader.setOnImageAvailableListener(imageListener, backgroundHandler);
444
+ previewRequestBuilder.addTarget(previewReader.getSurface());
445
+
446
+ // Here, we create a CameraCaptureSession for camera preview.
447
+ cameraDevice.createCaptureSession(
448
+ Arrays.asList(surface, previewReader.getSurface()),
449
+ new CameraCaptureSession.StateCallback() {
450
+
451
+ @Override
452
+ public void onConfigured(final CameraCaptureSession cameraCaptureSession) {
453
+ // The camera is already closed
454
+ if (null == cameraDevice) {
455
+ return;
456
+ }
457
+
458
+ // When the session is ready, we start displaying the preview.
459
+ captureSession = cameraCaptureSession;
460
+ try {
461
+ // Auto focus should be continuous for camera preview.
462
+ previewRequestBuilder.set(
463
+ CaptureRequest.CONTROL_AF_MODE,
464
+ CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
465
+ // Flash is automatically enabled when necessary.
466
+ previewRequestBuilder.set(
467
+ CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
468
+
469
+ // Finally, we start displaying the camera preview.
470
+ previewRequest = previewRequestBuilder.build();
471
+ captureSession.setRepeatingRequest(
472
+ previewRequest, captureCallback, backgroundHandler);
473
+ } catch (final CameraAccessException e) {
474
+ LOGGER.e(e, "Exception!");
475
+ }
476
+ }
477
+
478
+ @Override
479
+ public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
480
+ showToast("Failed to configure the camera capture session.");
481
+ }
482
+ },
483
+ null);
484
+ } catch (final CameraAccessException e) {
485
+ LOGGER.e(e, "Exception!");
486
+ }
487
+ }
488
+
489
+ /**
490
+ * Configures the necessary {@link Matrix} transformation to `textureView`. This method should be
492
+ * called after the camera preview size is determined in setUpCameraOutputs and also the size of
493
+ * `textureView` is fixed.
494
+ *
495
+ * @param viewWidth The width of `textureView`
496
+ * @param viewHeight The height of `textureView`
496
+ */
497
+ private void configureTransform(final int viewWidth, final int viewHeight) {
498
+ final Activity activity = getActivity();
499
+ if (null == textureView || null == previewSize || null == activity) {
500
+ return;
501
+ }
502
+ final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
503
+ final Matrix matrix = new Matrix();
504
+ final RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
505
+ final RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
506
+ final float centerX = viewRect.centerX();
507
+ final float centerY = viewRect.centerY();
508
+ if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
509
+ bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
510
+ matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
511
+ final float scale =
512
+ Math.max(
513
+ (float) viewHeight / previewSize.getHeight(),
514
+ (float) viewWidth / previewSize.getWidth());
515
+ matrix.postScale(scale, scale, centerX, centerY);
516
+ matrix.postRotate(90 * (rotation - 2), centerX, centerY);
517
+ } else if (Surface.ROTATION_180 == rotation) {
518
+ matrix.postRotate(180, centerX, centerY);
519
+ }
520
+ textureView.setTransform(matrix);
521
+ }
522
+
523
+ /**
524
+ * Callback for Activities to use to initialize their data once the selected preview size is
525
+ * known.
526
+ */
527
+ public interface ConnectionCallback {
528
+ void onPreviewSizeChosen(Size size, int cameraRotation);
529
+ }
530
+
531
+ /** Compares two {@code Size}s based on their areas. */
532
+ static class CompareSizesByArea implements Comparator<Size> {
533
+ @Override
534
+ public int compare(final Size lhs, final Size rhs) {
535
+ // We cast here to ensure the multiplications won't overflow
536
+ return Long.signum(
537
+ (long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
538
+ }
539
+ }
540
+
541
+ /** Shows an error message dialog. */
542
+ public static class ErrorDialog extends DialogFragment {
543
+ private static final String ARG_MESSAGE = "message";
544
+
545
+ public static ErrorDialog newInstance(final String message) {
546
+ final ErrorDialog dialog = new ErrorDialog();
547
+ final Bundle args = new Bundle();
548
+ args.putString(ARG_MESSAGE, message);
549
+ dialog.setArguments(args);
550
+ return dialog;
551
+ }
552
+
553
+ @Override
554
+ public Dialog onCreateDialog(final Bundle savedInstanceState) {
555
+ final Activity activity = getActivity();
556
+ return new AlertDialog.Builder(activity)
557
+ .setMessage(getArguments().getString(ARG_MESSAGE))
558
+ .setPositiveButton(
559
+ android.R.string.ok,
560
+ new DialogInterface.OnClickListener() {
561
+ @Override
562
+ public void onClick(final DialogInterface dialogInterface, final int i) {
563
+ activity.finish();
564
+ }
565
+ })
566
+ .create();
567
+ }
568
+ }
569
+ }
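
A quick aside on CompareSizesByArea above: the cast to long matters because width * height can overflow int for very large sensor modes, and the difference of two areas can overflow even when each area fits. A minimal, self-contained sketch of the same trick (plain Java, no Android dependency; the Dim class is a hypothetical stand-in for android.util.Size):

import java.util.Arrays;
import java.util.Comparator;

public class CompareByAreaDemo {
    // Stand-in for android.util.Size so the sketch runs on plain Java.
    static final class Dim {
        final int width, height;
        Dim(int w, int h) { width = w; height = h; }
        public String toString() { return width + "x" + height; }
    }

    public static void main(String[] args) {
        Dim[] sizes = { new Dim(1920, 1080), new Dim(640, 480), new Dim(4032, 3024) };
        // Same idea as CompareSizesByArea: widen to long before comparing
        // so the area arithmetic cannot overflow int.
        Arrays.sort(sizes, Comparator.comparingLong(d -> (long) d.width * d.height));
        System.out.println(Arrays.toString(sizes)); // [640x480, 1920x1080, 4032x3024]
    }
}
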
android/app/src/main/java/org/tensorflow/lite/examples/detection/DetectorActivity.java ADDED
@@ -0,0 +1,266 @@
1
+ /*
2
+ * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ package org.tensorflow.lite.examples.detection;
18
+
19
+ import android.graphics.Bitmap;
20
+ import android.graphics.Bitmap.Config;
21
+ import android.graphics.Canvas;
22
+ import android.graphics.Color;
23
+ import android.graphics.Matrix;
24
+ import android.graphics.Paint;
25
+ import android.graphics.Paint.Style;
26
+ import android.graphics.RectF;
27
+ import android.graphics.Typeface;
28
+ import android.media.ImageReader.OnImageAvailableListener;
29
+ import android.os.SystemClock;
30
+ import android.util.Log;
31
+ import android.util.Size;
32
+ import android.util.TypedValue;
33
+ import android.widget.Toast;
34
+
35
+ import java.io.IOException;
36
+ import java.util.LinkedList;
37
+ import java.util.List;
38
+
39
+ import org.tensorflow.lite.examples.detection.customview.OverlayView;
40
+ import org.tensorflow.lite.examples.detection.customview.OverlayView.DrawCallback;
41
+ import org.tensorflow.lite.examples.detection.env.BorderedText;
42
+ import org.tensorflow.lite.examples.detection.env.ImageUtils;
43
+ import org.tensorflow.lite.examples.detection.env.Logger;
44
+ import org.tensorflow.lite.examples.detection.tflite.Classifier;
45
+ import org.tensorflow.lite.examples.detection.tflite.YoloV4Classifier;
46
+ import org.tensorflow.lite.examples.detection.tracking.MultiBoxTracker;
47
+
48
+ /**
49
+ * An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to detect and then track
50
+ * objects.
51
+ */
52
+ public class DetectorActivity extends CameraActivity implements OnImageAvailableListener {
53
+ private static final Logger LOGGER = new Logger();
54
+
55
+ private static final int TF_OD_API_INPUT_SIZE = 416;
56
+ private static final boolean TF_OD_API_IS_QUANTIZED = false;
57
+ private static final String TF_OD_API_MODEL_FILE = "yolov4-416-fp32.tflite";
58
+
59
+ private static final String TF_OD_API_LABELS_FILE = "file:///android_asset/coco.txt";
60
+
61
+ private static final DetectorMode MODE = DetectorMode.TF_OD_API;
62
+ private static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.5f;
63
+ private static final boolean MAINTAIN_ASPECT = false;
64
+ private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480);
65
+ private static final boolean SAVE_PREVIEW_BITMAP = false;
66
+ private static final float TEXT_SIZE_DIP = 10;
67
+ OverlayView trackingOverlay;
68
+ private Integer sensorOrientation;
69
+
70
+ private Classifier detector;
71
+
72
+ private long lastProcessingTimeMs;
73
+ private Bitmap rgbFrameBitmap = null;
74
+ private Bitmap croppedBitmap = null;
75
+ private Bitmap cropCopyBitmap = null;
76
+
77
+ private boolean computingDetection = false;
78
+
79
+ private long timestamp = 0;
80
+
81
+ private Matrix frameToCropTransform;
82
+ private Matrix cropToFrameTransform;
83
+
84
+ private MultiBoxTracker tracker;
85
+
86
+ private BorderedText borderedText;
87
+
88
+ @Override
89
+ public void onPreviewSizeChosen(final Size size, final int rotation) {
90
+ final float textSizePx =
91
+ TypedValue.applyDimension(
92
+ TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
93
+ borderedText = new BorderedText(textSizePx);
94
+ borderedText.setTypeface(Typeface.MONOSPACE);
95
+
96
+ tracker = new MultiBoxTracker(this);
97
+
98
+ int cropSize = TF_OD_API_INPUT_SIZE;
99
+
100
+ try {
101
+ detector =
102
+ YoloV4Classifier.create(
103
+ getAssets(),
104
+ TF_OD_API_MODEL_FILE,
105
+ TF_OD_API_LABELS_FILE,
106
+ TF_OD_API_IS_QUANTIZED);
107
+ // detector = TFLiteObjectDetectionAPIModel.create(
108
+ // getAssets(),
109
+ // TF_OD_API_MODEL_FILE,
110
+ // TF_OD_API_LABELS_FILE,
111
+ // TF_OD_API_INPUT_SIZE,
112
+ // TF_OD_API_IS_QUANTIZED);
113
+ cropSize = TF_OD_API_INPUT_SIZE;
114
+ } catch (final IOException e) {
115
+ e.printStackTrace();
116
+ LOGGER.e(e, "Exception initializing classifier!");
117
+ Toast toast =
118
+ Toast.makeText(
119
+ getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
120
+ toast.show();
121
+ finish();
122
+ }
123
+
124
+ previewWidth = size.getWidth();
125
+ previewHeight = size.getHeight();
126
+
127
+ sensorOrientation = rotation - getScreenOrientation();
128
+ LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation);
129
+
130
+ LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
131
+ rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
132
+ croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
133
+
134
+ frameToCropTransform =
135
+ ImageUtils.getTransformationMatrix(
136
+ previewWidth, previewHeight,
137
+ cropSize, cropSize,
138
+ sensorOrientation, MAINTAIN_ASPECT);
139
+
140
+ cropToFrameTransform = new Matrix();
141
+ frameToCropTransform.invert(cropToFrameTransform);
142
+
143
+ trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay);
144
+ trackingOverlay.addCallback(
145
+ new DrawCallback() {
146
+ @Override
147
+ public void drawCallback(final Canvas canvas) {
148
+ tracker.draw(canvas);
149
+ if (isDebug()) {
150
+ tracker.drawDebug(canvas);
151
+ }
152
+ }
153
+ });
154
+
155
+ tracker.setFrameConfiguration(previewWidth, previewHeight, sensorOrientation);
156
+ }
157
+
158
+ @Override
159
+ protected void processImage() {
160
+ ++timestamp;
161
+ final long currTimestamp = timestamp;
162
+ trackingOverlay.postInvalidate();
163
+
164
+ // No mutex needed as this method is not reentrant.
165
+ if (computingDetection) {
166
+ readyForNextImage();
167
+ return;
168
+ }
169
+ computingDetection = true;
170
+ LOGGER.i("Preparing image " + currTimestamp + " for detection in bg thread.");
171
+
172
+ rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
173
+
174
+ readyForNextImage();
175
+
176
+ final Canvas canvas = new Canvas(croppedBitmap);
177
+ canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
178
+ // For examining the actual TF input.
179
+ if (SAVE_PREVIEW_BITMAP) {
180
+ ImageUtils.saveBitmap(croppedBitmap);
181
+ }
182
+
183
+ runInBackground(
184
+ new Runnable() {
185
+ @Override
186
+ public void run() {
187
+ LOGGER.i("Running detection on image " + currTimestamp);
188
+ final long startTime = SystemClock.uptimeMillis();
189
+ final List<Classifier.Recognition> results = detector.recognizeImage(croppedBitmap);
190
+ lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
191
+
192
+ Log.d("CHECK", "run: " + results.size() + " detections");
193
+
194
+ cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
195
+ final Canvas canvas = new Canvas(cropCopyBitmap);
196
+ final Paint paint = new Paint();
197
+ paint.setColor(Color.RED);
198
+ paint.setStyle(Style.STROKE);
199
+ paint.setStrokeWidth(2.0f);
200
+
201
+ float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
202
+ switch (MODE) {
203
+ case TF_OD_API:
204
+ minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
205
+ break;
206
+ }
207
+
208
+ final List<Classifier.Recognition> mappedRecognitions =
209
+ new LinkedList<Classifier.Recognition>();
210
+
211
+ for (final Classifier.Recognition result : results) {
212
+ final RectF location = result.getLocation();
213
+ if (location != null && result.getConfidence() >= minimumConfidence) {
214
+ canvas.drawRect(location, paint);
215
+
216
+ cropToFrameTransform.mapRect(location);
217
+
218
+ result.setLocation(location);
219
+ mappedRecognitions.add(result);
220
+ }
221
+ }
222
+
223
+ tracker.trackResults(mappedRecognitions, currTimestamp);
224
+ trackingOverlay.postInvalidate();
225
+
226
+ computingDetection = false;
227
+
228
+ runOnUiThread(
229
+ new Runnable() {
230
+ @Override
231
+ public void run() {
232
+ showFrameInfo(previewWidth + "x" + previewHeight);
233
+ showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight());
234
+ showInference(lastProcessingTimeMs + "ms");
235
+ }
236
+ });
237
+ }
238
+ });
239
+ }
240
+
241
+ @Override
242
+ protected int getLayoutId() {
243
+ return R.layout.tfe_od_camera_connection_fragment_tracking;
244
+ }
245
+
246
+ @Override
247
+ protected Size getDesiredPreviewFrameSize() {
248
+ return DESIRED_PREVIEW_SIZE;
249
+ }
250
+
251
+ // Which detection model to use: by default uses Tensorflow Object Detection API frozen
252
+ // checkpoints.
253
+ private enum DetectorMode {
254
+ TF_OD_API;
255
+ }
256
+
257
+ @Override
258
+ protected void setUseNNAPI(final boolean isChecked) {
259
+ runInBackground(() -> detector.setUseNNAPI(isChecked));
260
+ }
261
+
262
+ @Override
263
+ protected void setNumThreads(final int numThreads) {
264
+ runInBackground(() -> detector.setNumThreads(numThreads));
265
+ }
266
+ }
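
The core of processImage above is a filter-and-remap pass over the raw detections: keep results at or above MINIMUM_CONFIDENCE_TF_OD_API, then map each box from crop coordinates back to frame coordinates. A minimal sketch of just the filtering step, with a hypothetical Det class standing in for Classifier.Recognition (plain Java, no Android types):

import java.util.ArrayList;
import java.util.List;

public class ConfidenceFilterDemo {
    // Hypothetical stand-in for Classifier.Recognition.
    static final class Det {
        final String title; final float confidence;
        Det(String t, float c) { title = t; confidence = c; }
    }

    static List<Det> filter(List<Det> results, float minConfidence) {
        List<Det> kept = new ArrayList<>();
        for (Det d : results) {
            if (d.confidence >= minConfidence) kept.add(d);
        }
        return kept;
    }

    public static void main(String[] args) {
        List<Det> results = List.of(new Det("kite", 0.91f), new Det("person", 0.32f));
        // With the activity's 0.5 threshold, only "kite" survives.
        System.out.println(filter(results, 0.5f).size()); // 1
    }
}
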
android/app/src/main/java/org/tensorflow/lite/examples/detection/LegacyCameraConnectionFragment.java ADDED
@@ -0,0 +1,199 @@
1
+ package org.tensorflow.lite.examples.detection;
2
+
3
+ /*
4
+ * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
5
+ *
6
+ * Licensed under the Apache License, Version 2.0 (the "License");
7
+ * you may not use this file except in compliance with the License.
8
+ * You may obtain a copy of the License at
9
+ *
10
+ * http://www.apache.org/licenses/LICENSE-2.0
11
+ *
12
+ * Unless required by applicable law or agreed to in writing, software
13
+ * distributed under the License is distributed on an "AS IS" BASIS,
14
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ * See the License for the specific language governing permissions and
16
+ * limitations under the License.
17
+ */
18
+
19
+ import android.app.Fragment;
20
+ import android.graphics.SurfaceTexture;
21
+ import android.hardware.Camera;
22
+ import android.hardware.Camera.CameraInfo;
23
+ import android.os.Bundle;
24
+ import android.os.Handler;
25
+ import android.os.HandlerThread;
26
+ import android.util.Size;
27
+ import android.util.SparseIntArray;
28
+ import android.view.LayoutInflater;
29
+ import android.view.Surface;
30
+ import android.view.TextureView;
31
+ import android.view.View;
32
+ import android.view.ViewGroup;
33
+ import java.io.IOException;
34
+ import java.util.List;
35
+ import org.tensorflow.lite.examples.detection.customview.AutoFitTextureView;
36
+ import org.tensorflow.lite.examples.detection.env.ImageUtils;
37
+ import org.tensorflow.lite.examples.detection.env.Logger;
38
+
39
+ public class LegacyCameraConnectionFragment extends Fragment {
40
+ private static final Logger LOGGER = new Logger();
41
+ /** Conversion from screen rotation to JPEG orientation. */
42
+ private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
43
+
44
+ static {
45
+ ORIENTATIONS.append(Surface.ROTATION_0, 90);
46
+ ORIENTATIONS.append(Surface.ROTATION_90, 0);
47
+ ORIENTATIONS.append(Surface.ROTATION_180, 270);
48
+ ORIENTATIONS.append(Surface.ROTATION_270, 180);
49
+ }
50
+
51
+ private Camera camera;
52
+ private Camera.PreviewCallback imageListener;
53
+ private Size desiredSize;
54
+ /** The layout identifier to inflate for this Fragment. */
55
+ private int layout;
56
+ /** An {@link AutoFitTextureView} for camera preview. */
57
+ private AutoFitTextureView textureView;
58
+ /**
59
+ * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
60
+ * TextureView}.
61
+ */
62
+ private final TextureView.SurfaceTextureListener surfaceTextureListener =
63
+ new TextureView.SurfaceTextureListener() {
64
+ @Override
65
+ public void onSurfaceTextureAvailable(
66
+ final SurfaceTexture texture, final int width, final int height) {
67
+
68
+ int index = getCameraId();
69
+ camera = Camera.open(index);
70
+
71
+ try {
72
+ Camera.Parameters parameters = camera.getParameters();
73
+ List<String> focusModes = parameters.getSupportedFocusModes();
74
+ if (focusModes != null
75
+ && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
76
+ parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
77
+ }
78
+ List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
79
+ Size[] sizes = new Size[cameraSizes.size()];
80
+ int i = 0;
81
+ for (Camera.Size size : cameraSizes) {
82
+ sizes[i++] = new Size(size.width, size.height);
83
+ }
84
+ Size previewSize =
85
+ CameraConnectionFragment.chooseOptimalSize(
86
+ sizes, desiredSize.getWidth(), desiredSize.getHeight());
87
+ parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
88
+ camera.setDisplayOrientation(90);
89
+ camera.setParameters(parameters);
90
+ camera.setPreviewTexture(texture);
91
+ } catch (IOException exception) {
92
+ LOGGER.e(exception, "Exception setting up the camera preview!");
+ camera.release();
+ return; // Don't keep using a released camera below.
93
+ }
94
+
95
+ camera.setPreviewCallbackWithBuffer(imageListener);
96
+ Camera.Size s = camera.getParameters().getPreviewSize();
97
+ camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
98
+
99
+ textureView.setAspectRatio(s.height, s.width);
100
+
101
+ camera.startPreview();
102
+ }
103
+
104
+ @Override
105
+ public void onSurfaceTextureSizeChanged(
106
+ final SurfaceTexture texture, final int width, final int height) {}
107
+
108
+ @Override
109
+ public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
110
+ return true;
111
+ }
112
+
113
+ @Override
114
+ public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
115
+ };
116
+ /** An additional thread for running tasks that shouldn't block the UI. */
117
+ private HandlerThread backgroundThread;
118
+
119
+ public LegacyCameraConnectionFragment(
120
+ final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
121
+ this.imageListener = imageListener;
122
+ this.layout = layout;
123
+ this.desiredSize = desiredSize;
124
+ }
125
+
126
+ @Override
127
+ public View onCreateView(
128
+ final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
129
+ return inflater.inflate(layout, container, false);
130
+ }
131
+
132
+ @Override
133
+ public void onViewCreated(final View view, final Bundle savedInstanceState) {
134
+ textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
135
+ }
136
+
137
+ @Override
138
+ public void onActivityCreated(final Bundle savedInstanceState) {
139
+ super.onActivityCreated(savedInstanceState);
140
+ }
141
+
142
+ @Override
143
+ public void onResume() {
144
+ super.onResume();
145
+ startBackgroundThread();
146
+ // When the screen is turned off and turned back on, the SurfaceTexture is already
147
+ // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
148
+ // a camera and start preview from here (otherwise, we wait until the surface is ready in
149
+ // the SurfaceTextureListener).
150
+
151
+ if (textureView.isAvailable()) {
152
+ camera.startPreview();
153
+ } else {
154
+ textureView.setSurfaceTextureListener(surfaceTextureListener);
155
+ }
156
+ }
157
+
158
+ @Override
159
+ public void onPause() {
160
+ stopCamera();
161
+ stopBackgroundThread();
162
+ super.onPause();
163
+ }
164
+
165
+ /** Starts a background thread and its {@link Handler}. */
166
+ private void startBackgroundThread() {
167
+ backgroundThread = new HandlerThread("CameraBackground");
168
+ backgroundThread.start();
169
+ }
170
+
171
+ /** Stops the background thread and its {@link Handler}. */
172
+ private void stopBackgroundThread() {
173
+ backgroundThread.quitSafely();
174
+ try {
175
+ backgroundThread.join();
176
+ backgroundThread = null;
177
+ } catch (final InterruptedException e) {
178
+ LOGGER.e(e, "Exception!");
179
+ }
180
+ }
181
+
182
+ protected void stopCamera() {
183
+ if (camera != null) {
184
+ camera.stopPreview();
185
+ camera.setPreviewCallback(null);
186
+ camera.release();
187
+ camera = null;
188
+ }
189
+ }
190
+
191
+ private int getCameraId() {
192
+ CameraInfo ci = new CameraInfo();
193
+ for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
194
+ Camera.getCameraInfo(i, ci);
195
+ if (ci.facing == CameraInfo.CAMERA_FACING_BACK) return i;
196
+ }
197
+ return -1; // No camera found
198
+ }
199
+ }
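
The callback buffer handed to addCallbackBuffer above must hold one full NV21/YUV420SP frame, which is what ImageUtils.getYUVByteSize computes: one byte per pixel of luma plus two bytes per 2x2 block of chroma. A runnable sanity check of that arithmetic:

public class YuvSizeDemo {
    // Mirrors ImageUtils.getYUVByteSize: Y plane + interleaved UV plane,
    // with odd dimensions rounded up to whole 2x2 chroma blocks.
    static int yuvByteSize(int width, int height) {
        int ySize = width * height;
        int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;
        return ySize + uvSize;
    }

    public static void main(String[] args) {
        System.out.println(yuvByteSize(640, 480)); // 460800 = 640 * 480 * 1.5
        System.out.println(yuvByteSize(641, 481)); // odd sizes round the UV plane up
    }
}
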
android/app/src/main/java/org/tensorflow/lite/examples/detection/MainActivity.java ADDED
@@ -0,0 +1,162 @@
1
+ package org.tensorflow.lite.examples.detection;
2
+
3
+ import androidx.appcompat.app.AppCompatActivity;
4
+
5
+ import android.content.Context;
6
+ import android.content.Intent;
7
+ import android.graphics.Bitmap;
8
+ import android.graphics.Canvas;
9
+ import android.graphics.Color;
10
+ import android.graphics.Matrix;
11
+ import android.graphics.Paint;
12
+ import android.graphics.RectF;
13
+ import android.os.Bundle;
14
+ import android.os.Handler;
15
+ import android.util.Log;
16
+ import android.view.View;
17
+ import android.widget.Button;
18
+ import android.widget.ImageView;
19
+ import android.widget.Toast;
20
+
21
+ import org.tensorflow.lite.examples.detection.customview.OverlayView;
22
+ import org.tensorflow.lite.examples.detection.env.ImageUtils;
23
+ import org.tensorflow.lite.examples.detection.env.Logger;
24
+ import org.tensorflow.lite.examples.detection.env.Utils;
25
+ import org.tensorflow.lite.examples.detection.tflite.Classifier;
26
+ import org.tensorflow.lite.examples.detection.tflite.YoloV4Classifier;
27
+ import org.tensorflow.lite.examples.detection.tracking.MultiBoxTracker;
28
+
29
+ import java.io.IOException;
30
+ import java.util.LinkedList;
31
+ import java.util.List;
32
+
33
+ public class MainActivity extends AppCompatActivity {
34
+
35
+ public static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.5f;
36
+
37
+ @Override
38
+ protected void onCreate(Bundle savedInstanceState) {
39
+ super.onCreate(savedInstanceState);
40
+ setContentView(R.layout.activity_main);
41
+
42
+ cameraButton = findViewById(R.id.cameraButton);
43
+ detectButton = findViewById(R.id.detectButton);
44
+ imageView = findViewById(R.id.imageView);
45
+
46
+ cameraButton.setOnClickListener(v -> startActivity(new Intent(MainActivity.this, DetectorActivity.class)));
47
+
48
+ detectButton.setOnClickListener(v -> {
49
+ Handler handler = new Handler();
50
+
51
+ new Thread(() -> {
52
+ final List<Classifier.Recognition> results = detector.recognizeImage(cropBitmap);
53
+ handler.post(new Runnable() {
54
+ @Override
55
+ public void run() {
56
+ handleResult(cropBitmap, results);
57
+ }
58
+ });
59
+ }).start();
60
+
61
+ });
62
+ this.sourceBitmap = Utils.getBitmapFromAsset(MainActivity.this, "kite.jpg");
63
+
64
+ this.cropBitmap = Utils.processBitmap(sourceBitmap, TF_OD_API_INPUT_SIZE);
65
+
66
+ this.imageView.setImageBitmap(cropBitmap);
67
+
68
+ initBox();
69
+ }
70
+
71
+ private static final Logger LOGGER = new Logger();
72
+
73
+ public static final int TF_OD_API_INPUT_SIZE = 416;
74
+
75
+ private static final boolean TF_OD_API_IS_QUANTIZED = false;
76
+
77
+ private static final String TF_OD_API_MODEL_FILE = "yolov4-416-fp32.tflite";
78
+
79
+ private static final String TF_OD_API_LABELS_FILE = "file:///android_asset/coco.txt";
80
+
81
+ // Minimum detection confidence to track a detection.
82
+ private static final boolean MAINTAIN_ASPECT = false;
83
+ private Integer sensorOrientation = 90;
84
+
85
+ private Classifier detector;
86
+
87
+ private Matrix frameToCropTransform;
88
+ private Matrix cropToFrameTransform;
89
+ private MultiBoxTracker tracker;
90
+ private OverlayView trackingOverlay;
91
+
92
+ protected int previewWidth = 0;
93
+ protected int previewHeight = 0;
94
+
95
+ private Bitmap sourceBitmap;
96
+ private Bitmap cropBitmap;
97
+
98
+ private Button cameraButton, detectButton;
99
+ private ImageView imageView;
100
+
101
+ private void initBox() {
102
+ previewHeight = TF_OD_API_INPUT_SIZE;
103
+ previewWidth = TF_OD_API_INPUT_SIZE;
104
+ frameToCropTransform =
105
+ ImageUtils.getTransformationMatrix(
106
+ previewWidth, previewHeight,
107
+ TF_OD_API_INPUT_SIZE, TF_OD_API_INPUT_SIZE,
108
+ sensorOrientation, MAINTAIN_ASPECT);
109
+
110
+ cropToFrameTransform = new Matrix();
111
+ frameToCropTransform.invert(cropToFrameTransform);
112
+
113
+ tracker = new MultiBoxTracker(this);
114
+ trackingOverlay = findViewById(R.id.tracking_overlay);
115
+ trackingOverlay.addCallback(
116
+ canvas -> tracker.draw(canvas));
117
+
118
+ tracker.setFrameConfiguration(TF_OD_API_INPUT_SIZE, TF_OD_API_INPUT_SIZE, sensorOrientation);
119
+
120
+ try {
121
+ detector =
122
+ YoloV4Classifier.create(
123
+ getAssets(),
124
+ TF_OD_API_MODEL_FILE,
125
+ TF_OD_API_LABELS_FILE,
126
+ TF_OD_API_IS_QUANTIZED);
127
+ } catch (final IOException e) {
128
+ e.printStackTrace();
129
+ LOGGER.e(e, "Exception initializing classifier!");
130
+ Toast toast =
131
+ Toast.makeText(
132
+ getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
133
+ toast.show();
134
+ finish();
135
+ }
136
+ }
137
+
138
+ private void handleResult(Bitmap bitmap, List<Classifier.Recognition> results) {
139
+ final Canvas canvas = new Canvas(bitmap);
140
+ final Paint paint = new Paint();
141
+ paint.setColor(Color.RED);
142
+ paint.setStyle(Paint.Style.STROKE);
143
+ paint.setStrokeWidth(2.0f);
144
+
145
+ final List<Classifier.Recognition> mappedRecognitions =
146
+ new LinkedList<Classifier.Recognition>();
147
+
148
+ for (final Classifier.Recognition result : results) {
149
+ final RectF location = result.getLocation();
150
+ if (location != null && result.getConfidence() >= MINIMUM_CONFIDENCE_TF_OD_API) {
151
+ canvas.drawRect(location, paint);
152
+ // cropToFrameTransform.mapRect(location);
153
+ //
154
+ // result.setLocation(location);
155
+ // mappedRecognitions.add(result);
156
+ }
157
+ }
158
+ // tracker.trackResults(mappedRecognitions, new Random().nextInt());
159
+ // trackingOverlay.postInvalidate();
160
+ imageView.setImageBitmap(bitmap);
161
+ }
162
+ }
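
MainActivity's detect button follows the usual off-main-thread pattern: run inference on a worker thread, then hand the result back to the UI thread via a Handler. The same shape in plain Java, using an ExecutorService and a callback in place of the Android Handler (all names here are illustrative, not part of the app):

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.function.Consumer;

public class BackgroundInferenceDemo {
    // Hypothetical stand-in for detector.recognizeImage(bitmap).
    static String recognizeImage(String input) {
        return "results for " + input;
    }

    static void detectAsync(String image, Consumer<String> onResult) {
        ExecutorService worker = Executors.newSingleThreadExecutor();
        worker.submit(() -> {
            String results = recognizeImage(image); // heavy work off the caller's thread
            onResult.accept(results);               // in the app, handler.post(...) instead
            worker.shutdown();
        });
    }

    public static void main(String[] args) throws InterruptedException {
        detectAsync("kite.jpg", r -> System.out.println(r));
        Thread.sleep(200); // let the worker finish in this demo
    }
}
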
android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/AutoFitTextureView.java ADDED
@@ -0,0 +1,72 @@
1
+ /*
2
+ * Copyright 2019 The TensorFlow Authors. All Rights Reserved.
3
+ *
4
+ * Licensed under the Apache License, Version 2.0 (the "License");
5
+ * you may not use this file except in compliance with the License.
6
+ * You may obtain a copy of the License at
7
+ *
8
+ * http://www.apache.org/licenses/LICENSE-2.0
9
+ *
10
+ * Unless required by applicable law or agreed to in writing, software
11
+ * distributed under the License is distributed on an "AS IS" BASIS,
12
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ * See the License for the specific language governing permissions and
14
+ * limitations under the License.
15
+ */
16
+
17
+ package org.tensorflow.lite.examples.detection.customview;
18
+
19
+ import android.content.Context;
20
+ import android.util.AttributeSet;
21
+ import android.view.TextureView;
22
+
23
+ /** A {@link TextureView} that can be adjusted to a specified aspect ratio. */
24
+ public class AutoFitTextureView extends TextureView {
25
+ private int ratioWidth = 0;
26
+ private int ratioHeight = 0;
27
+
28
+ public AutoFitTextureView(final Context context) {
29
+ this(context, null);
30
+ }
31
+
32
+ public AutoFitTextureView(final Context context, final AttributeSet attrs) {
33
+ this(context, attrs, 0);
34
+ }
35
+
36
+ public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
37
+ super(context, attrs, defStyle);
38
+ }
39
+
40
+ /**
41
+ * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
42
+ * calculated from the parameters. Note that the actual sizes of parameters don't matter, that is,
43
+ * calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
44
+ *
45
+ * @param width Relative horizontal size
46
+ * @param height Relative vertical size
47
+ */
48
+ public void setAspectRatio(final int width, final int height) {
49
+ if (width < 0 || height < 0) {
50
+ throw new IllegalArgumentException("Size cannot be negative.");
51
+ }
52
+ ratioWidth = width;
53
+ ratioHeight = height;
54
+ requestLayout();
55
+ }
56
+
57
+ @Override
58
+ protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
59
+ super.onMeasure(widthMeasureSpec, heightMeasureSpec);
60
+ final int width = MeasureSpec.getSize(widthMeasureSpec);
61
+ final int height = MeasureSpec.getSize(heightMeasureSpec);
62
+ if (0 == ratioWidth || 0 == ratioHeight) {
63
+ setMeasuredDimension(width, height);
64
+ } else {
65
+ if (width < height * ratioWidth / ratioHeight) {
66
+ setMeasuredDimension(width, width * ratioHeight / ratioWidth);
67
+ } else {
68
+ setMeasuredDimension(height * ratioWidth / ratioHeight, height);
69
+ }
70
+ }
71
+ }
72
+ }
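
The branch in onMeasure above picks whichever dimension is the binding constraint: if the view is narrower than the ratio demands, width wins and height is derived from it; otherwise height wins. The same integer math, runnable standalone:

public class AspectFitDemo {
    // Mirrors AutoFitTextureView.onMeasure once an aspect ratio has been set.
    static int[] measure(int width, int height, int ratioWidth, int ratioHeight) {
        if (width < height * ratioWidth / ratioHeight) {
            return new int[] { width, width * ratioHeight / ratioWidth };
        }
        return new int[] { height * ratioWidth / ratioHeight, height };
    }

    public static void main(String[] args) {
        int[] a = measure(1080, 1920, 640, 480); // portrait view, 4:3 preview
        System.out.println(a[0] + "x" + a[1]);   // 1080x810 -> width-constrained
        int[] b = measure(1920, 1080, 640, 480); // landscape view
        System.out.println(b[0] + "x" + b[1]);   // 1440x1080 -> height-constrained
    }
}
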
android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/OverlayView.java ADDED
@@ -0,0 +1,48 @@
1
+ /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2
+
3
+ Licensed under the Apache License, Version 2.0 (the "License");
4
+ you may not use this file except in compliance with the License.
5
+ You may obtain a copy of the License at
6
+
7
+ http://www.apache.org/licenses/LICENSE-2.0
8
+
9
+ Unless required by applicable law or agreed to in writing, software
10
+ distributed under the License is distributed on an "AS IS" BASIS,
11
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ See the License for the specific language governing permissions and
13
+ limitations under the License.
14
+ ==============================================================================*/
15
+
16
+ package org.tensorflow.lite.examples.detection.customview;
17
+
18
+ import android.content.Context;
19
+ import android.graphics.Canvas;
20
+ import android.util.AttributeSet;
21
+ import android.view.View;
22
+ import java.util.LinkedList;
23
+ import java.util.List;
24
+
25
+ /** A simple View providing a render callback to other classes. */
26
+ public class OverlayView extends View {
27
+ private final List<DrawCallback> callbacks = new LinkedList<DrawCallback>();
28
+
29
+ public OverlayView(final Context context, final AttributeSet attrs) {
30
+ super(context, attrs);
31
+ }
32
+
33
+ public void addCallback(final DrawCallback callback) {
34
+ callbacks.add(callback);
35
+ }
36
+
37
+ @Override
38
+ public synchronized void draw(final Canvas canvas) {
39
+ for (final DrawCallback callback : callbacks) {
40
+ callback.drawCallback(canvas);
41
+ }
42
+ }
43
+
44
+ /** Interface defining the callback for client classes. */
45
+ public interface DrawCallback {
46
+ public void drawCallback(final Canvas canvas);
47
+ }
48
+ }
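
OverlayView simply fans a single draw call out to every registered callback in registration order. The dispatch pattern on its own, with a StringBuilder as a hypothetical stand-in for the Canvas:

import java.util.LinkedList;
import java.util.List;
import java.util.function.Consumer;

public class CallbackFanoutDemo {
    public static void main(String[] args) {
        // Stand-in for OverlayView's callback list; StringBuilder plays the Canvas.
        List<Consumer<StringBuilder>> callbacks = new LinkedList<>();
        callbacks.add(sb -> sb.append("boxes "));
        callbacks.add(sb -> sb.append("debug-stats"));

        StringBuilder canvas = new StringBuilder();
        for (Consumer<StringBuilder> cb : callbacks) {
            cb.accept(canvas); // each callback draws onto the shared canvas
        }
        System.out.println(canvas); // boxes debug-stats
    }
}
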
android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/RecognitionScoreView.java ADDED
@@ -0,0 +1,67 @@
1
+ /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2
+
3
+ Licensed under the Apache License, Version 2.0 (the "License");
4
+ you may not use this file except in compliance with the License.
5
+ You may obtain a copy of the License at
6
+
7
+ http://www.apache.org/licenses/LICENSE-2.0
8
+
9
+ Unless required by applicable law or agreed to in writing, software
10
+ distributed under the License is distributed on an "AS IS" BASIS,
11
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ See the License for the specific language governing permissions and
13
+ limitations under the License.
14
+ ==============================================================================*/
15
+
16
+ package org.tensorflow.lite.examples.detection.customview;
17
+
18
+ import android.content.Context;
19
+ import android.graphics.Canvas;
20
+ import android.graphics.Paint;
21
+ import android.util.AttributeSet;
22
+ import android.util.TypedValue;
23
+ import android.view.View;
24
+ import java.util.List;
25
+ import org.tensorflow.lite.examples.detection.tflite.Classifier.Recognition;
26
+
27
+ public class RecognitionScoreView extends View implements ResultsView {
28
+ private static final float TEXT_SIZE_DIP = 14;
29
+ private final float textSizePx;
30
+ private final Paint fgPaint;
31
+ private final Paint bgPaint;
32
+ private List<Recognition> results;
33
+
34
+ public RecognitionScoreView(final Context context, final AttributeSet set) {
35
+ super(context, set);
36
+
37
+ textSizePx =
38
+ TypedValue.applyDimension(
39
+ TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
40
+ fgPaint = new Paint();
41
+ fgPaint.setTextSize(textSizePx);
42
+
43
+ bgPaint = new Paint();
44
+ bgPaint.setColor(0xcc4285f4);
45
+ }
46
+
47
+ @Override
48
+ public void setResults(final List<Recognition> results) {
49
+ this.results = results;
50
+ postInvalidate();
51
+ }
52
+
53
+ @Override
54
+ public void onDraw(final Canvas canvas) {
55
+ final int x = 10;
56
+ int y = (int) (fgPaint.getTextSize() * 1.5f);
57
+
58
+ canvas.drawPaint(bgPaint);
59
+
60
+ if (results != null) {
61
+ for (final Recognition recog : results) {
62
+ canvas.drawText(recog.getTitle() + ": " + recog.getConfidence(), x, y, fgPaint);
63
+ y += (int) (fgPaint.getTextSize() * 1.5f);
64
+ }
65
+ }
66
+ }
67
+ }
android/app/src/main/java/org/tensorflow/lite/examples/detection/customview/ResultsView.java ADDED
@@ -0,0 +1,23 @@
1
+ /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2
+
3
+ Licensed under the Apache License, Version 2.0 (the "License");
4
+ you may not use this file except in compliance with the License.
5
+ You may obtain a copy of the License at
6
+
7
+ http://www.apache.org/licenses/LICENSE-2.0
8
+
9
+ Unless required by applicable law or agreed to in writing, software
10
+ distributed under the License is distributed on an "AS IS" BASIS,
11
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ See the License for the specific language governing permissions and
13
+ limitations under the License.
14
+ ==============================================================================*/
15
+
16
+ package org.tensorflow.lite.examples.detection.customview;
17
+
18
+ import java.util.List;
19
+ import org.tensorflow.lite.examples.detection.tflite.Classifier.Recognition;
20
+
21
+ public interface ResultsView {
22
+ public void setResults(final List<Recognition> results);
23
+ }
android/app/src/main/java/org/tensorflow/lite/examples/detection/env/BorderedText.java ADDED
@@ -0,0 +1,128 @@
1
+ /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2
+
3
+ Licensed under the Apache License, Version 2.0 (the "License");
4
+ you may not use this file except in compliance with the License.
5
+ You may obtain a copy of the License at
6
+
7
+ http://www.apache.org/licenses/LICENSE-2.0
8
+
9
+ Unless required by applicable law or agreed to in writing, software
10
+ distributed under the License is distributed on an "AS IS" BASIS,
11
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ See the License for the specific language governing permissions and
13
+ limitations under the License.
14
+ ==============================================================================*/
15
+
16
+ package org.tensorflow.lite.examples.detection.env;
17
+
18
+ import android.graphics.Canvas;
19
+ import android.graphics.Color;
20
+ import android.graphics.Paint;
21
+ import android.graphics.Paint.Align;
22
+ import android.graphics.Paint.Style;
23
+ import android.graphics.Rect;
24
+ import android.graphics.Typeface;
25
+ import java.util.Vector;
26
+
27
+ /** A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas. */
28
+ public class BorderedText {
29
+ private final Paint interiorPaint;
30
+ private final Paint exteriorPaint;
31
+
32
+ private final float textSize;
33
+
34
+ /**
35
+ * Creates a left-aligned bordered text object with a white interior, and a black exterior with
36
+ * the specified text size.
37
+ *
38
+ * @param textSize text size in pixels
39
+ */
40
+ public BorderedText(final float textSize) {
41
+ this(Color.WHITE, Color.BLACK, textSize);
42
+ }
43
+
44
+ /**
45
+ * Create a bordered text object with the specified interior and exterior colors, text size and
46
+ * alignment.
47
+ *
48
+ * @param interiorColor the interior text color
49
+ * @param exteriorColor the exterior text color
50
+ * @param textSize text size in pixels
51
+ */
52
+ public BorderedText(final int interiorColor, final int exteriorColor, final float textSize) {
53
+ interiorPaint = new Paint();
54
+ interiorPaint.setTextSize(textSize);
55
+ interiorPaint.setColor(interiorColor);
56
+ interiorPaint.setStyle(Style.FILL);
57
+ interiorPaint.setAntiAlias(false);
58
+ interiorPaint.setAlpha(255);
59
+
60
+ exteriorPaint = new Paint();
61
+ exteriorPaint.setTextSize(textSize);
62
+ exteriorPaint.setColor(exteriorColor);
63
+ exteriorPaint.setStyle(Style.FILL_AND_STROKE);
64
+ exteriorPaint.setStrokeWidth(textSize / 8);
65
+ exteriorPaint.setAntiAlias(false);
66
+ exteriorPaint.setAlpha(255);
67
+
68
+ this.textSize = textSize;
69
+ }
70
+
71
+ public void setTypeface(Typeface typeface) {
72
+ interiorPaint.setTypeface(typeface);
73
+ exteriorPaint.setTypeface(typeface);
74
+ }
75
+
76
+ public void drawText(final Canvas canvas, final float posX, final float posY, final String text) {
77
+ canvas.drawText(text, posX, posY, exteriorPaint);
78
+ canvas.drawText(text, posX, posY, interiorPaint);
79
+ }
80
+
81
+ public void drawText(
82
+ final Canvas canvas, final float posX, final float posY, final String text, Paint bgPaint) {
83
+
84
+ float width = exteriorPaint.measureText(text);
85
+ float textSize = exteriorPaint.getTextSize();
86
+ Paint paint = new Paint(bgPaint);
87
+ paint.setStyle(Paint.Style.FILL);
88
+ paint.setAlpha(160);
89
+ canvas.drawRect(posX, (posY + (int) (textSize)), (posX + (int) (width)), posY, paint);
90
+
91
+ canvas.drawText(text, posX, (posY + textSize), interiorPaint);
92
+ }
93
+
94
+ public void drawLines(Canvas canvas, final float posX, final float posY, Vector<String> lines) {
95
+ int lineNum = 0;
96
+ for (final String line : lines) {
97
+ drawText(canvas, posX, posY - getTextSize() * (lines.size() - lineNum - 1), line);
98
+ ++lineNum;
99
+ }
100
+ }
101
+
102
+ public void setInteriorColor(final int color) {
103
+ interiorPaint.setColor(color);
104
+ }
105
+
106
+ public void setExteriorColor(final int color) {
107
+ exteriorPaint.setColor(color);
108
+ }
109
+
110
+ public float getTextSize() {
111
+ return textSize;
112
+ }
113
+
114
+ public void setAlpha(final int alpha) {
115
+ interiorPaint.setAlpha(alpha);
116
+ exteriorPaint.setAlpha(alpha);
117
+ }
118
+
119
+ public void getTextBounds(
120
+ final String line, final int index, final int count, final Rect lineBounds) {
121
+ interiorPaint.getTextBounds(line, index, count, lineBounds);
122
+ }
123
+
124
+ public void setTextAlign(final Align align) {
125
+ interiorPaint.setTextAlign(align);
126
+ exteriorPaint.setTextAlign(align);
127
+ }
128
+ }
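
drawLines above anchors the last line at posY and stacks earlier lines above it, one text-size step apart. The baseline arithmetic, checked standalone with illustrative values:

import java.util.Vector;

public class DrawLinesDemo {
    public static void main(String[] args) {
        float textSize = 10f;
        float posY = 100f;
        Vector<String> lines = new Vector<>();
        lines.add("fps: 30");
        lines.add("inference: 45ms");
        lines.add("crop: 416x416");
        // Same offset as BorderedText.drawLines: line i sits
        // (lines.size() - i - 1) text sizes above posY.
        for (int i = 0; i < lines.size(); i++) {
            float y = posY - textSize * (lines.size() - i - 1);
            System.out.println(y + " -> " + lines.get(i)); // 80.0, 90.0, 100.0
        }
    }
}
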
android/app/src/main/java/org/tensorflow/lite/examples/detection/env/ImageUtils.java ADDED
@@ -0,0 +1,219 @@
1
+ /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2
+
3
+ Licensed under the Apache License, Version 2.0 (the "License");
4
+ you may not use this file except in compliance with the License.
5
+ You may obtain a copy of the License at
6
+
7
+ http://www.apache.org/licenses/LICENSE-2.0
8
+
9
+ Unless required by applicable law or agreed to in writing, software
10
+ distributed under the License is distributed on an "AS IS" BASIS,
11
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ See the License for the specific language governing permissions and
13
+ limitations under the License.
14
+ ==============================================================================*/
15
+
16
+ package org.tensorflow.lite.examples.detection.env;
17
+
18
+ import android.graphics.Bitmap;
19
+ import android.graphics.Matrix;
20
+ import android.os.Environment;
21
+ import java.io.File;
22
+ import java.io.FileOutputStream;
23
+
24
+ /** Utility class for manipulating images. */
25
+ public class ImageUtils {
26
+ // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
27
+ // are normalized to eight bits.
28
+ static final int kMaxChannelValue = 262143;
29
+
30
+ @SuppressWarnings("unused")
31
+ private static final Logger LOGGER = new Logger();
32
+
33
+ /**
34
+ * Utility method to compute the allocated size in bytes of a YUV420SP image of the given
35
+ * dimensions.
36
+ */
37
+ public static int getYUVByteSize(final int width, final int height) {
38
+ // The luminance plane requires 1 byte per pixel.
39
+ final int ySize = width * height;
40
+
41
+ // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up.
42
+ // Each 2x2 block takes 2 bytes to encode, one each for U and V.
43
+ final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;
44
+
45
+ return ySize + uvSize;
46
+ }
47
+
48
+ /**
49
+ * Saves a Bitmap object to disk for analysis.
50
+ *
51
+ * @param bitmap The bitmap to save.
52
+ */
53
+ public static void saveBitmap(final Bitmap bitmap) {
54
+ saveBitmap(bitmap, "preview.png");
55
+ }
56
+
57
+ /**
58
+ * Saves a Bitmap object to disk for analysis.
59
+ *
60
+ * @param bitmap The bitmap to save.
61
+ * @param filename The location to save the bitmap to.
62
+ */
63
+ public static void saveBitmap(final Bitmap bitmap, final String filename) {
64
+ final String root =
65
+ Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
66
+ LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root);
67
+ final File myDir = new File(root);
68
+
69
+ if (!myDir.mkdirs()) {
70
+ LOGGER.i("Make dir failed");
71
+ }
72
+
73
+ final String fname = filename;
74
+ final File file = new File(myDir, fname);
75
+ if (file.exists()) {
76
+ file.delete();
77
+ }
78
+ try {
79
+ final FileOutputStream out = new FileOutputStream(file);
80
+ bitmap.compress(Bitmap.CompressFormat.PNG, 99, out);
81
+ out.flush();
82
+ out.close();
83
+ } catch (final Exception e) {
84
+ LOGGER.e(e, "Exception!");
85
+ }
86
+ }
87
+
88
+ public static void convertYUV420SPToARGB8888(byte[] input, int width, int height, int[] output) {
89
+ final int frameSize = width * height;
90
+ for (int j = 0, yp = 0; j < height; j++) {
91
+ int uvp = frameSize + (j >> 1) * width;
92
+ int u = 0;
93
+ int v = 0;
94
+
95
+ for (int i = 0; i < width; i++, yp++) {
96
+ int y = 0xff & input[yp];
97
+ if ((i & 1) == 0) {
98
+ v = 0xff & input[uvp++];
99
+ u = 0xff & input[uvp++];
100
+ }
101
+
102
+ output[yp] = YUV2RGB(y, u, v);
103
+ }
104
+ }
105
+ }
106
+
107
+ private static int YUV2RGB(int y, int u, int v) {
108
+ // Adjust and check YUV values
109
+ y = (y - 16) < 0 ? 0 : (y - 16);
110
+ u -= 128;
111
+ v -= 128;
112
+
113
+ // This is the floating point equivalent. We do the conversion in integer
114
+ // because some Android devices do not have floating point in hardware.
115
+ // nR = (int)(1.164 * nY + 2.018 * nU);
116
+ // nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
117
+ // nB = (int)(1.164 * nY + 1.596 * nV);
118
+ int y1192 = 1192 * y;
119
+ int r = (y1192 + 1634 * v);
120
+ int g = (y1192 - 833 * v - 400 * u);
121
+ int b = (y1192 + 2066 * u);
122
+
123
+ // Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ]
124
+ r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r);
125
+ g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g);
126
+ b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 0 : b);
127
+
128
+ return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
129
+ }
130
+
131
+ public static void convertYUV420ToARGB8888(
132
+ byte[] yData,
133
+ byte[] uData,
134
+ byte[] vData,
135
+ int width,
136
+ int height,
137
+ int yRowStride,
138
+ int uvRowStride,
139
+ int uvPixelStride,
140
+ int[] out) {
141
+ int yp = 0;
142
+ for (int j = 0; j < height; j++) {
143
+ int pY = yRowStride * j;
144
+ int pUV = uvRowStride * (j >> 1);
145
+
146
+ for (int i = 0; i < width; i++) {
147
+ int uv_offset = pUV + (i >> 1) * uvPixelStride;
148
+
149
+ out[yp++] = YUV2RGB(0xff & yData[pY + i], 0xff & uData[uv_offset], 0xff & vData[uv_offset]);
150
+ }
151
+ }
152
+ }
153
+
154
+ /**
155
+ * Returns a transformation matrix from one reference frame into another. Handles cropping (if
156
+ * maintaining aspect ratio is desired) and rotation.
157
+ *
158
+ * @param srcWidth Width of source frame.
159
+ * @param srcHeight Height of source frame.
160
+ * @param dstWidth Width of destination frame.
161
+ * @param dstHeight Height of destination frame.
162
+ * @param applyRotation Amount of rotation to apply from one frame to another. Must be a multiple
163
+ * of 90.
164
+ * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
165
+ * cropping the image if necessary.
166
+ * @return The transformation fulfilling the desired requirements.
167
+ */
168
+ public static Matrix getTransformationMatrix(
169
+ final int srcWidth,
170
+ final int srcHeight,
171
+ final int dstWidth,
172
+ final int dstHeight,
173
+ final int applyRotation,
174
+ final boolean maintainAspectRatio) {
175
+ final Matrix matrix = new Matrix();
176
+
177
+ if (applyRotation != 0) {
178
+ if (applyRotation % 90 != 0) {
179
+ LOGGER.w("Rotation of %d % 90 != 0", applyRotation);
180
+ }
181
+
182
+ // Translate so center of image is at origin.
183
+ matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
184
+
185
+ // Rotate around origin.
186
+ matrix.postRotate(applyRotation);
187
+ }
188
+
189
+ // Account for the already applied rotation, if any, and then determine how
190
+ // much scaling is needed for each axis.
191
+ final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
192
+
193
+ final int inWidth = transpose ? srcHeight : srcWidth;
194
+ final int inHeight = transpose ? srcWidth : srcHeight;
195
+
196
+ // Apply scaling if necessary.
197
+ if (inWidth != dstWidth || inHeight != dstHeight) {
198
+ final float scaleFactorX = dstWidth / (float) inWidth;
199
+ final float scaleFactorY = dstHeight / (float) inHeight;
200
+
201
+ if (maintainAspectRatio) {
202
+ // Scale by minimum factor so that dst is filled completely while
203
+ // maintaining the aspect ratio. Some image may fall off the edge.
204
+ final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
205
+ matrix.postScale(scaleFactor, scaleFactor);
206
+ } else {
207
+ // Scale exactly to fill dst from src.
208
+ matrix.postScale(scaleFactorX, scaleFactorY);
209
+ }
210
+ }
211
+
212
+ if (applyRotation != 0) {
213
+ // Translate back from origin centered reference to destination frame.
214
+ matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
215
+ }
216
+
217
+ return matrix;
218
+ }
219
+ }
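
The fixed-point constants in YUV2RGB above are the BT.601 coefficients scaled by 1024 (1.164 is approximately 1192/1024, 1.596 approximately 1634/1024, and so on), which is why intermediate results are clamped to 2^18 - 1 before the final shifts extract 8-bit channels. A standalone copy of the conversion for a quick check:

public class Yuv2RgbDemo {
    static final int K_MAX = 262143; // 2^18 - 1, as in ImageUtils

    static int yuv2rgb(int y, int u, int v) {
        y = Math.max(y - 16, 0);
        u -= 128;
        v -= 128;
        int y1192 = 1192 * y;
        int r = y1192 + 1634 * v;
        int g = y1192 - 833 * v - 400 * u;
        int b = y1192 + 2066 * u;
        r = Math.min(Math.max(r, 0), K_MAX);
        g = Math.min(Math.max(g, 0), K_MAX);
        b = Math.min(Math.max(b, 0), K_MAX);
        return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
    }

    public static void main(String[] args) {
        // Mid-gray: Y=128, U=V=128 lands on ARGB 0xff828282.
        System.out.println(Integer.toHexString(yuv2rgb(128, 128, 128)));
    }
}
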
android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Logger.java ADDED
@@ -0,0 +1,186 @@
1
+ /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
2
+
3
+ Licensed under the Apache License, Version 2.0 (the "License");
4
+ you may not use this file except in compliance with the License.
5
+ You may obtain a copy of the License at
6
+
7
+ http://www.apache.org/licenses/LICENSE-2.0
8
+
9
+ Unless required by applicable law or agreed to in writing, software
10
+ distributed under the License is distributed on an "AS IS" BASIS,
11
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ See the License for the specific language governing permissions and
13
+ limitations under the License.
14
+ ==============================================================================*/
15
+
16
+ package org.tensorflow.lite.examples.detection.env;
17
+
18
+ import android.util.Log;
19
+ import java.util.HashSet;
20
+ import java.util.Set;
21
+
22
+ /** Wrapper for the platform log function that allows convenient message prefixing and log disabling. */
23
+ public final class Logger {
24
+ private static final String DEFAULT_TAG = "tensorflow";
25
+ private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG;
26
+
27
+ // Classes to be ignored when examining the stack trace
28
+ private static final Set<String> IGNORED_CLASS_NAMES;
29
+
30
+ static {
31
+ IGNORED_CLASS_NAMES = new HashSet<String>(3);
32
+ IGNORED_CLASS_NAMES.add("dalvik.system.VMStack");
33
+ IGNORED_CLASS_NAMES.add("java.lang.Thread");
34
+ IGNORED_CLASS_NAMES.add(Logger.class.getCanonicalName());
35
+ }
36
+
37
+ private final String tag;
38
+ private final String messagePrefix;
39
+ private int minLogLevel = DEFAULT_MIN_LOG_LEVEL;
40
+
41
+ /**
42
+ * Creates a Logger using the class name as the message prefix.
43
+ *
44
+ * @param clazz the simple name of this class is used as the message prefix.
45
+ */
46
+ public Logger(final Class<?> clazz) {
47
+ this(clazz.getSimpleName());
48
+ }
49
+
50
+ /**
51
+ * Creates a Logger using the specified message prefix.
52
+ *
53
+ * @param messagePrefix is prepended to the text of every message.
54
+ */
55
+ public Logger(final String messagePrefix) {
56
+ this(DEFAULT_TAG, messagePrefix);
57
+ }
58
+
59
+ /**
60
+ * Creates a Logger with a custom tag and a custom message prefix. If the message prefix is set
61
+ * to {@code null}, the caller's class name is used as the prefix.
65
+ *
66
+ * @param tag identifies the source of a log message.
67
+ * @param messagePrefix prepended to every message if non-null. If null, the name of the caller is
68
+ * being used
69
+ */
70
+ public Logger(final String tag, final String messagePrefix) {
71
+ this.tag = tag;
72
+ final String prefix = messagePrefix == null ? getCallerSimpleName() : messagePrefix;
73
+ this.messagePrefix = (prefix.length() > 0) ? prefix + ": " : prefix;
74
+ }
75
+
76
+ /** Creates a Logger using the caller's class name as the message prefix. */
77
+ public Logger() {
78
+ this(DEFAULT_TAG, null);
79
+ }
80
+
81
+ /** Creates a Logger using the caller's class name as the message prefix. */
82
+ public Logger(final int minLogLevel) {
83
+ this(DEFAULT_TAG, null);
84
+ this.minLogLevel = minLogLevel;
85
+ }
86
+
87
+ /**
88
+ * Return caller's simple name.
89
+ *
90
+ * <p>Android getStackTrace() returns an array that looks like this: stackTrace[0]:
91
+ * dalvik.system.VMStack stackTrace[1]: java.lang.Thread stackTrace[2]:
92
+ * com.google.android.apps.unveil.env.UnveilLogger stackTrace[3]:
93
+ * com.google.android.apps.unveil.BaseApplication
94
+ *
95
+ * <p>This function returns the simple version of the first non-filtered name.
96
+ *
97
+ * @return caller's simple name
98
+ */
99
+ private static String getCallerSimpleName() {
100
+ // Get the current callstack so we can pull the class of the caller off of it.
101
+ final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
102
+
103
+ for (final StackTraceElement elem : stackTrace) {
104
+ final String className = elem.getClassName();
105
+ if (!IGNORED_CLASS_NAMES.contains(className)) {
106
+ // We're only interested in the simple name of the class, not the complete package.
107
+ final String[] classParts = className.split("\\.");
108
+ return classParts[classParts.length - 1];
109
+ }
110
+ }
111
+
112
+ return Logger.class.getSimpleName();
113
+ }
114
+
115
+ public void setMinLogLevel(final int minLogLevel) {
116
+ this.minLogLevel = minLogLevel;
117
+ }
118
+
119
+ public boolean isLoggable(final int logLevel) {
120
+ return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel);
121
+ }
122
+
123
+ private String toMessage(final String format, final Object... args) {
124
+ return messagePrefix + (args.length > 0 ? String.format(format, args) : format);
125
+ }
126
+
127
+ public void v(final String format, final Object... args) {
128
+ if (isLoggable(Log.VERBOSE)) {
129
+ Log.v(tag, toMessage(format, args));
130
+ }
131
+ }
132
+
133
+ public void v(final Throwable t, final String format, final Object... args) {
134
+ if (isLoggable(Log.VERBOSE)) {
135
+ Log.v(tag, toMessage(format, args), t);
136
+ }
137
+ }
138
+
139
+ public void d(final String format, final Object... args) {
140
+ if (isLoggable(Log.DEBUG)) {
141
+ Log.d(tag, toMessage(format, args));
142
+ }
143
+ }
144
+
145
+ public void d(final Throwable t, final String format, final Object... args) {
146
+ if (isLoggable(Log.DEBUG)) {
147
+ Log.d(tag, toMessage(format, args), t);
148
+ }
149
+ }
150
+
151
+ public void i(final String format, final Object... args) {
152
+ if (isLoggable(Log.INFO)) {
153
+ Log.i(tag, toMessage(format, args));
154
+ }
155
+ }
156
+
157
+ public void i(final Throwable t, final String format, final Object... args) {
158
+ if (isLoggable(Log.INFO)) {
159
+ Log.i(tag, toMessage(format, args), t);
160
+ }
161
+ }
162
+
163
+ public void w(final String format, final Object... args) {
164
+ if (isLoggable(Log.WARN)) {
165
+ Log.w(tag, toMessage(format, args));
166
+ }
167
+ }
168
+
169
+ public void w(final Throwable t, final String format, final Object... args) {
170
+ if (isLoggable(Log.WARN)) {
171
+ Log.w(tag, toMessage(format, args), t);
172
+ }
173
+ }
174
+
175
+ public void e(final String format, final Object... args) {
176
+ if (isLoggable(Log.ERROR)) {
177
+ Log.e(tag, toMessage(format, args));
178
+ }
179
+ }
180
+
181
+ public void e(final Throwable t, final String format, final Object... args) {
182
+ if (isLoggable(Log.ERROR)) {
183
+ Log.e(tag, toMessage(format, args), t);
184
+ }
185
+ }
186
+ }
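Since Logger resolves a null message prefix by walking the stack past the IGNORED_CLASS_NAMES entries, a minimal usage sketch may help; the DetectorActivity caller below is only illustrative:

// Hypothetical caller, shown only to illustrate prefix resolution.
public class DetectorActivity {
  // DEFAULT_TAG ("tensorflow") is used, and because the prefix is null,
  // getCallerSimpleName() resolves it to "DetectorActivity".
  private static final Logger LOGGER = new Logger();

  void onDetection(int count, long frameTimeMs) {
    // Logged at INFO as "DetectorActivity: Found 3 objects in 42 ms";
    // the format string is only expanded when varargs are present.
    LOGGER.i("Found %d objects in %d ms", count, frameTimeMs);
  }
}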
android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Size.java ADDED
@@ -0,0 +1,142 @@
+ /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ==============================================================================*/
+
+ package org.tensorflow.lite.examples.detection.env;
+
+ import android.graphics.Bitmap;
+ import android.text.TextUtils;
+ import java.io.Serializable;
+ import java.util.ArrayList;
+ import java.util.List;
+
+ /** Size class independent of a Camera object. */
+ public class Size implements Comparable<Size>, Serializable {
+
+   // 1.4 went out with this UID so we'll need to maintain it to preserve pending queries when
+   // upgrading.
+   public static final long serialVersionUID = 7689808733290872361L;
+
+   public final int width;
+   public final int height;
+
+   public Size(final int width, final int height) {
+     this.width = width;
+     this.height = height;
+   }
+
+   public Size(final Bitmap bmp) {
+     this.width = bmp.getWidth();
+     this.height = bmp.getHeight();
+   }
+
+   /**
+    * Rotate a size by the given number of degrees.
+    *
+    * @param size Size to rotate.
+    * @param rotation Degrees {0, 90, 180, 270} to rotate the size.
+    * @return Rotated size.
+    */
+   public static Size getRotatedSize(final Size size, final int rotation) {
+     if (rotation % 180 != 0) {
+       // The phone is portrait, therefore the camera is sideways and frame should be rotated.
+       return new Size(size.height, size.width);
+     }
+     return size;
+   }
+
+   public static Size parseFromString(String sizeString) {
+     if (TextUtils.isEmpty(sizeString)) {
+       return null;
+     }
+
+     sizeString = sizeString.trim();
+
+     // The expected format is "<width>x<height>".
+     final String[] components = sizeString.split("x");
+     if (components.length == 2) {
+       try {
+         final int width = Integer.parseInt(components[0]);
+         final int height = Integer.parseInt(components[1]);
+         return new Size(width, height);
+       } catch (final NumberFormatException e) {
+         return null;
+       }
+     } else {
+       return null;
+     }
+   }
+
+   public static List<Size> sizeStringToList(final String sizes) {
+     final List<Size> sizeList = new ArrayList<Size>();
+     if (sizes != null) {
+       final String[] pairs = sizes.split(",");
+       for (final String pair : pairs) {
+         final Size size = Size.parseFromString(pair);
+         if (size != null) {
+           sizeList.add(size);
+         }
+       }
+     }
+     return sizeList;
+   }
+
+   public static String sizeListToString(final List<Size> sizes) {
+     String sizesString = "";
+     if (sizes != null && sizes.size() > 0) {
+       sizesString = sizes.get(0).toString();
+       for (int i = 1; i < sizes.size(); i++) {
+         sizesString += "," + sizes.get(i).toString();
+       }
+     }
+     return sizesString;
+   }
+
+   public static final String dimensionsAsString(final int width, final int height) {
+     return width + "x" + height;
+   }
+
+   public final float aspectRatio() {
+     return (float) width / (float) height;
+   }
+
+   @Override
+   public int compareTo(final Size other) {
+     return width * height - other.width * other.height;
+   }
+
+   @Override
+   public boolean equals(final Object other) {
+     if (other == null) {
+       return false;
+     }
+
+     if (!(other instanceof Size)) {
+       return false;
+     }
+
+     final Size otherSize = (Size) other;
+     return (width == otherSize.width && height == otherSize.height);
+   }
+
+   @Override
+   public int hashCode() {
+     return width * 32713 + height;
+   }
+
+   @Override
+   public String toString() {
+     return dimensionsAsString(width, height);
+   }
+ }
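Because the parsing helpers above expect the "<width>x<height>" form and silently skip malformed entries, a short round-trip sketch (with arbitrary values) shows the intended usage:

// "bogus" fails parseFromString() and is dropped, so two sizes survive.
List<Size> sizes = Size.sizeStringToList("640x480,1280x720,bogus");
// sizeListToString() reverses the conversion: "640x480,1280x720".
String joined = Size.sizeListToString(sizes);
// aspectRatio() of 640x480 is 4:3, i.e. roughly 1.333f.
float ratio = sizes.get(0).aspectRatio();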
android/app/src/main/java/org/tensorflow/lite/examples/detection/env/Utils.java ADDED
@@ -0,0 +1,188 @@
+ package org.tensorflow.lite.examples.detection.env;
+
+ import android.content.Context;
+ import android.content.res.AssetFileDescriptor;
+ import android.content.res.AssetManager;
+ import android.graphics.Bitmap;
+ import android.graphics.BitmapFactory;
+ import android.graphics.Canvas;
+ import android.graphics.Matrix;
+ import android.os.Environment;
+ import android.util.Log;
+
+ import org.tensorflow.lite.examples.detection.MainActivity;
+
+ import java.io.File;
+ import java.io.FileInputStream;
+ import java.io.FileOutputStream;
+ import java.io.IOException;
+ import java.io.InputStream;
+ import java.io.OutputStreamWriter;
+ import java.nio.MappedByteBuffer;
+ import java.nio.channels.FileChannel;
+
+ public class Utils {
+
+   /**
+    * Memory-map the model file in Assets.
+    */
+   public static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename)
+       throws IOException {
+     AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename);
+     FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
+     FileChannel fileChannel = inputStream.getChannel();
+     long startOffset = fileDescriptor.getStartOffset();
+     long declaredLength = fileDescriptor.getDeclaredLength();
+     return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
+   }
+
+   public static void softmax(final float[] vals) {
+     float max = Float.NEGATIVE_INFINITY;
+     for (final float val : vals) {
+       max = Math.max(max, val);
+     }
+     float sum = 0.0f;
+     for (int i = 0; i < vals.length; ++i) {
+       vals[i] = (float) Math.exp(vals[i] - max);
+       sum += vals[i];
+     }
+     for (int i = 0; i < vals.length; ++i) {
+       vals[i] = vals[i] / sum;
+     }
+   }
+
+   public static float expit(final float x) {
+     return (float) (1. / (1. + Math.exp(-x)));
+   }
+
+   // public static Bitmap scale(Context context, String filePath) {
+   //   AssetManager assetManager = context.getAssets();
+   //
+   //   InputStream istr;
+   //   Bitmap bitmap = null;
+   //   try {
+   //     istr = assetManager.open(filePath);
+   //     bitmap = BitmapFactory.decodeStream(istr);
+   //     bitmap = Bitmap.createScaledBitmap(bitmap, MainActivity.TF_OD_API_INPUT_SIZE, MainActivity.TF_OD_API_INPUT_SIZE, false);
+   //   } catch (IOException e) {
+   //     // handle exception
+   //     Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
+   //   }
+   //
+   //   return bitmap;
+   // }
+
+   public static Bitmap getBitmapFromAsset(Context context, String filePath) {
+     AssetManager assetManager = context.getAssets();
+
+     InputStream istr;
+     Bitmap bitmap = null;
+     try {
+       istr = assetManager.open(filePath);
+       bitmap = BitmapFactory.decodeStream(istr);
+       // return bitmap.copy(Bitmap.Config.ARGB_8888, true);
+     } catch (IOException e) {
+       // handle exception
+       Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
+     }
+
+     return bitmap;
+   }
+
+   /**
+    * Returns a transformation matrix from one reference frame into another.
+    * Handles cropping (if maintaining aspect ratio is desired) and rotation.
+    *
+    * @param srcWidth Width of source frame.
+    * @param srcHeight Height of source frame.
+    * @param dstWidth Width of destination frame.
+    * @param dstHeight Height of destination frame.
+    * @param applyRotation Amount of rotation to apply from one frame to another.
+    *     Must be a multiple of 90.
+    * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
+    *     cropping the image if necessary.
+    * @return The transformation fulfilling the desired requirements.
+    */
+   public static Matrix getTransformationMatrix(
+       final int srcWidth,
+       final int srcHeight,
+       final int dstWidth,
+       final int dstHeight,
+       final int applyRotation,
+       final boolean maintainAspectRatio) {
+     final Matrix matrix = new Matrix();
+
+     if (applyRotation != 0) {
+       // Translate so center of image is at origin.
+       matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
+
+       // Rotate around origin.
+       matrix.postRotate(applyRotation);
+     }
+
+     // Account for the already applied rotation, if any, and then determine how
+     // much scaling is needed for each axis.
+     final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
+
+     final int inWidth = transpose ? srcHeight : srcWidth;
+     final int inHeight = transpose ? srcWidth : srcHeight;
+
+     // Apply scaling if necessary.
+     if (inWidth != dstWidth || inHeight != dstHeight) {
+       final float scaleFactorX = dstWidth / (float) inWidth;
+       final float scaleFactorY = dstHeight / (float) inHeight;
+
+       if (maintainAspectRatio) {
+         // Scale by minimum factor so that dst is filled completely while
+         // maintaining the aspect ratio. Some image may fall off the edge.
+         final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
+         matrix.postScale(scaleFactor, scaleFactor);
+       } else {
+         // Scale exactly to fill dst from src.
+         matrix.postScale(scaleFactorX, scaleFactorY);
+       }
+     }
+
+     if (applyRotation != 0) {
+       // Translate back from origin centered reference to destination frame.
+       matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
+     }
+
+     return matrix;
+   }
+
+   public static Bitmap processBitmap(Bitmap source, int size) {
+     int image_height = source.getHeight();
+     int image_width = source.getWidth();
+
+     Bitmap croppedBitmap = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888);
+
+     Matrix frameToCropTransformations = getTransformationMatrix(image_width, image_height, size, size, 0, false);
+     Matrix cropToFrameTransformations = new Matrix();
+     frameToCropTransformations.invert(cropToFrameTransformations);
+
+     final Canvas canvas = new Canvas(croppedBitmap);
+     canvas.drawBitmap(source, frameToCropTransformations, null);
+
+     return croppedBitmap;
+   }
+
+   public static void writeToFile(String data, Context context) {
+     try {
+       String baseDir = Environment.getExternalStorageDirectory().getAbsolutePath();
+       String fileName = "myFile.txt";
+
+       File file = new File(baseDir + File.separator + fileName);
+
+       FileOutputStream stream = new FileOutputStream(file);
+       try {
+         stream.write(data.getBytes());
+       } finally {
+         stream.close();
+       }
+     } catch (IOException e) {
+       Log.e("Exception", "File write failed: " + e.toString());
+     }
+   }
+ }
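To make the matrix contract concrete, this sketch maps a camera frame into a square model input the same way processBitmap() does; the 640x480 source size is arbitrary:

// Stretch a 640x480 frame into a 416x416 model input, no rotation,
// distorting the aspect ratio (maintainAspectRatio = false).
Matrix frameToCrop = Utils.getTransformationMatrix(640, 480, 416, 416, 0, false);

// Invert it to map detection boxes back into frame coordinates.
Matrix cropToFrame = new Matrix();
frameToCrop.invert(cropToFrame);

// A box covering the full 416x416 input maps back onto the whole frame.
RectF box = new RectF(0, 0, 416, 416);
cropToFrame.mapRect(box); // box is now (0, 0, 640, 480)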
android/app/src/main/java/org/tensorflow/lite/examples/detection/tflite/Classifier.java ADDED
@@ -0,0 +1,134 @@
+ /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ==============================================================================*/
+
+ package org.tensorflow.lite.examples.detection.tflite;
+
+ import android.graphics.Bitmap;
+ import android.graphics.RectF;
+
+ import java.util.List;
+
+ /**
+  * Generic interface for interacting with different recognition engines.
+  */
+ public interface Classifier {
+   List<Recognition> recognizeImage(Bitmap bitmap);
+
+   void enableStatLogging(final boolean debug);
+
+   String getStatString();
+
+   void close();
+
+   void setNumThreads(int num_threads);
+
+   void setUseNNAPI(boolean isChecked);
+
+   float getObjThresh();
+
+   /**
+    * An immutable result returned by a Classifier describing what was recognized.
+    */
+   public class Recognition {
+     /**
+      * A unique identifier for what has been recognized. Specific to the class, not the instance of
+      * the object.
+      */
+     private final String id;
+
+     /**
+      * Display name for the recognition.
+      */
+     private final String title;
+
+     /**
+      * A sortable score for how good the recognition is relative to others. Higher should be better.
+      */
+     private final Float confidence;
+
+     /**
+      * Optional location within the source image for the location of the recognized object.
+      */
+     private RectF location;
+
+     private int detectedClass;
+
+     public Recognition(
+         final String id, final String title, final Float confidence, final RectF location) {
+       this.id = id;
+       this.title = title;
+       this.confidence = confidence;
+       this.location = location;
+     }
+
+     public Recognition(final String id, final String title, final Float confidence, final RectF location, int detectedClass) {
+       this.id = id;
+       this.title = title;
+       this.confidence = confidence;
+       this.location = location;
+       this.detectedClass = detectedClass;
+     }
+
+     public String getId() {
+       return id;
+     }
+
+     public String getTitle() {
+       return title;
+     }
+
+     public Float getConfidence() {
+       return confidence;
+     }
+
+     public RectF getLocation() {
+       return new RectF(location);
+     }
+
+     public void setLocation(RectF location) {
+       this.location = location;
+     }
+
+     public int getDetectedClass() {
+       return detectedClass;
+     }
+
+     public void setDetectedClass(int detectedClass) {
+       this.detectedClass = detectedClass;
+     }
+
+     @Override
+     public String toString() {
+       String resultString = "";
+       if (id != null) {
+         resultString += "[" + id + "] ";
+       }
+
+       if (title != null) {
+         resultString += title + " ";
+       }
+
+       if (confidence != null) {
+         resultString += String.format("(%.1f%%) ", confidence * 100.0f);
+       }
+
+       if (location != null) {
+         resultString += location + " ";
+       }
+
+       return resultString.trim();
+     }
+   }
+ }
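A short sketch of how a caller typically consumes this interface; the classifier variable stands in for any concrete implementation, such as the YoloV4Classifier below:

List<Classifier.Recognition> results = classifier.recognizeImage(bitmap);
for (Classifier.Recognition r : results) {
  if (r.getConfidence() >= classifier.getObjThresh()) {
    // getLocation() returns a defensive copy, so mutating the returned
    // rectangle does not disturb the stored box.
    RectF box = r.getLocation();
    Log.d("Detection", r.toString()); // e.g. "[3] dog (93.0%) RectF(...)"
  }
}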
android/app/src/main/java/org/tensorflow/lite/examples/detection/tflite/YoloV4Classifier.java ADDED
@@ -0,0 +1,599 @@
+ /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+     http://www.apache.org/licenses/LICENSE-2.0
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ==============================================================================*/
+
+ package org.tensorflow.lite.examples.detection.tflite;
+
+ import android.content.res.AssetManager;
+ import android.graphics.Bitmap;
+ import android.graphics.RectF;
+ import android.os.Build;
+ import android.os.Trace;
+ import android.util.Log;
+
+ import java.io.BufferedReader;
+ import java.io.FileInputStream;
+ import java.io.IOException;
+ import java.io.InputStream;
+ import java.io.InputStreamReader;
+ import java.nio.ByteBuffer;
+ import java.nio.ByteOrder;
+ import java.util.ArrayList;
+ import java.util.Comparator;
+ import java.util.HashMap;
+ import java.util.List;
+ import java.util.Map;
+ import java.util.PriorityQueue;
+ import java.util.Vector;
+
+ import org.json.JSONArray;
+ import org.json.JSONException;
+ import org.json.JSONObject;
+ import org.tensorflow.lite.Interpreter;
+ import org.tensorflow.lite.gpu.GpuDelegate;
+ import org.tensorflow.lite.nnapi.NnApiDelegate;
+ import org.tensorflow.lite.examples.detection.MainActivity;
+ import org.tensorflow.lite.examples.detection.env.Logger;
+ import org.tensorflow.lite.examples.detection.env.Utils;
+
+ import static org.tensorflow.lite.examples.detection.env.Utils.expit;
+ import static org.tensorflow.lite.examples.detection.env.Utils.softmax;
+
+ /**
+  * Wrapper for frozen detection models trained using the Tensorflow Object Detection API:
+  * - https://github.com/tensorflow/models/tree/master/research/object_detection
+  * where you can find the training code.
+  * <p>
+  * To use pretrained models in the API or convert to TF Lite models, please see docs for details:
+  * - https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md
+  * - https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/running_on_mobile_tensorflowlite.md#running-our-model-on-android
+  */
+ public class YoloV4Classifier implements Classifier {
+
+   /**
+    * Initializes a native TensorFlow session for classifying images.
+    *
+    * @param assetManager The asset manager to be used to load assets.
+    * @param modelFilename The filepath of the model GraphDef protocol buffer.
+    * @param labelFilename The filepath of label file for classes.
+    * @param isQuantized Boolean representing whether the model is quantized or not.
+    */
+   public static Classifier create(
+       final AssetManager assetManager,
+       final String modelFilename,
+       final String labelFilename,
+       final boolean isQuantized)
+       throws IOException {
+     final YoloV4Classifier d = new YoloV4Classifier();
+
+     String actualFilename = labelFilename.split("file:///android_asset/")[1];
+     InputStream labelsInput = assetManager.open(actualFilename);
+     BufferedReader br = new BufferedReader(new InputStreamReader(labelsInput));
+     String line;
+     while ((line = br.readLine()) != null) {
+       LOGGER.w(line);
+       d.labels.add(line);
+     }
+     br.close();
+
+     try {
+       Interpreter.Options options = (new Interpreter.Options());
+       options.setNumThreads(NUM_THREADS);
+       if (isNNAPI) {
+         NnApiDelegate nnApiDelegate = null;
+         // Initialize interpreter with NNAPI delegate for Android Pie or above
+         if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
+           nnApiDelegate = new NnApiDelegate();
+           options.addDelegate(nnApiDelegate);
+           options.setNumThreads(NUM_THREADS);
+           options.setUseNNAPI(false);
+           options.setAllowFp16PrecisionForFp32(true);
+           options.setAllowBufferHandleOutput(true);
+           options.setUseNNAPI(true);
+         }
+       }
+       if (isGPU) {
+         GpuDelegate gpuDelegate = new GpuDelegate();
+         options.addDelegate(gpuDelegate);
+       }
+       d.tfLite = new Interpreter(Utils.loadModelFile(assetManager, modelFilename), options);
+     } catch (Exception e) {
+       throw new RuntimeException(e);
+     }
+
+     d.isModelQuantized = isQuantized;
+     // Pre-allocate buffers.
+     int numBytesPerChannel;
+     if (isQuantized) {
+       numBytesPerChannel = 1; // Quantized
+     } else {
+       numBytesPerChannel = 4; // Floating point
+     }
+     d.imgData = ByteBuffer.allocateDirect(1 * d.INPUT_SIZE * d.INPUT_SIZE * 3 * numBytesPerChannel);
+     d.imgData.order(ByteOrder.nativeOrder());
+     d.intValues = new int[d.INPUT_SIZE * d.INPUT_SIZE];
+
+     return d;
+   }
+
+   @Override
+   public void enableStatLogging(final boolean logStats) {
+   }
+
+   @Override
+   public String getStatString() {
+     return "";
+   }
+
+   @Override
+   public void close() {
+   }
+
+   public void setNumThreads(int num_threads) {
+     if (tfLite != null) tfLite.setNumThreads(num_threads);
+   }
+
+   @Override
+   public void setUseNNAPI(boolean isChecked) {
+     if (tfLite != null) tfLite.setUseNNAPI(isChecked);
+   }
+
+   @Override
+   public float getObjThresh() {
+     return MainActivity.MINIMUM_CONFIDENCE_TF_OD_API;
+   }
+
+   private static final Logger LOGGER = new Logger();
+
+   // Float model
+   private static final float IMAGE_MEAN = 0;
+
+   private static final float IMAGE_STD = 255.0f;
+
+   // config yolov4
+   private static final int INPUT_SIZE = 416;
+   private static final int[] OUTPUT_WIDTH = new int[]{52, 26, 13};
+
+   private static final int[][] MASKS = new int[][]{{0, 1, 2}, {3, 4, 5}, {6, 7, 8}};
+   private static final int[] ANCHORS = new int[]{
+       12, 16, 19, 36, 40, 28, 36, 75, 76, 55, 72, 146, 142, 110, 192, 243, 459, 401
+   };
+   private static final float[] XYSCALE = new float[]{1.2f, 1.1f, 1.05f};
+
+   private static final int NUM_BOXES_PER_BLOCK = 3;
+
+   // Number of threads in the java app
+   private static final int NUM_THREADS = 4;
+   private static boolean isNNAPI = false;
+   private static boolean isGPU = true;
+
+   // tiny or not
+   private static boolean isTiny = false;
+
+   // config yolov4 tiny
+   private static final int[] OUTPUT_WIDTH_TINY = new int[]{2535, 2535};
+   private static final int[] OUTPUT_WIDTH_FULL = new int[]{10647, 10647};
+   private static final int[][] MASKS_TINY = new int[][]{{3, 4, 5}, {1, 2, 3}};
+   private static final int[] ANCHORS_TINY = new int[]{
+       23, 27, 37, 58, 81, 82, 81, 82, 135, 169, 344, 319};
+   private static final float[] XYSCALE_TINY = new float[]{1.05f, 1.05f};
+
+   private boolean isModelQuantized;
+
+   // Config values.
+
+   // Pre-allocated buffers.
+   private Vector<String> labels = new Vector<String>();
+   private int[] intValues;
+
+   private ByteBuffer imgData;
+
+   private Interpreter tfLite;
+
+   private YoloV4Classifier() {
+   }
+
+   // Non-maximum suppression: per class, repeatedly keep the highest-confidence box and
+   // drop every remaining box whose IoU with it reaches mNmsThresh.
+   protected ArrayList<Recognition> nms(ArrayList<Recognition> list) {
+     ArrayList<Recognition> nmsList = new ArrayList<Recognition>();
+
+     for (int k = 0; k < labels.size(); k++) {
+       // 1. find max confidence per class
+       PriorityQueue<Recognition> pq =
+           new PriorityQueue<Recognition>(
+               50,
+               new Comparator<Recognition>() {
+                 @Override
+                 public int compare(final Recognition lhs, final Recognition rhs) {
+                   // Intentionally reversed to put high confidence at the head of the queue.
+                   return Float.compare(rhs.getConfidence(), lhs.getConfidence());
+                 }
+               });
+
+       for (int i = 0; i < list.size(); ++i) {
+         if (list.get(i).getDetectedClass() == k) {
+           pq.add(list.get(i));
+         }
+       }
+
+       // 2. do non maximum suppression
+       while (pq.size() > 0) {
+         // insert detection with max confidence
+         Recognition[] a = new Recognition[pq.size()];
+         Recognition[] detections = pq.toArray(a);
+         Recognition max = detections[0];
+         nmsList.add(max);
+         pq.clear();
+
+         for (int j = 1; j < detections.length; j++) {
+           Recognition detection = detections[j];
+           RectF b = detection.getLocation();
+           if (box_iou(max.getLocation(), b) < mNmsThresh) {
+             pq.add(detection);
+           }
+         }
+       }
+     }
+     return nmsList;
+   }
+
+   protected float mNmsThresh = 0.6f;
+
+   protected float box_iou(RectF a, RectF b) {
+     return box_intersection(a, b) / box_union(a, b);
+   }
+
+   protected float box_intersection(RectF a, RectF b) {
+     float w = overlap((a.left + a.right) / 2, a.right - a.left,
+         (b.left + b.right) / 2, b.right - b.left);
+     float h = overlap((a.top + a.bottom) / 2, a.bottom - a.top,
+         (b.top + b.bottom) / 2, b.bottom - b.top);
+     if (w < 0 || h < 0) return 0;
+     float area = w * h;
+     return area;
+   }
+
+   protected float box_union(RectF a, RectF b) {
+     float i = box_intersection(a, b);
+     float u = (a.right - a.left) * (a.bottom - a.top) + (b.right - b.left) * (b.bottom - b.top) - i;
+     return u;
+   }
+
+   protected float overlap(float x1, float w1, float x2, float w2) {
+     float l1 = x1 - w1 / 2;
+     float l2 = x2 - w2 / 2;
+     float left = l1 > l2 ? l1 : l2;
+     float r1 = x1 + w1 / 2;
+     float r2 = x2 + w2 / 2;
+     float right = r1 < r2 ? r1 : r2;
+     return right - left;
+   }
+
+   protected static final int BATCH_SIZE = 1;
+   protected static final int PIXEL_SIZE = 3;
+
+   /**
+    * Writes Image data into a {@code ByteBuffer}.
+    */
+   protected ByteBuffer convertBitmapToByteBuffer(Bitmap bitmap) {
+     ByteBuffer byteBuffer = ByteBuffer.allocateDirect(4 * BATCH_SIZE * INPUT_SIZE * INPUT_SIZE * PIXEL_SIZE);
+     byteBuffer.order(ByteOrder.nativeOrder());
+     int[] intValues = new int[INPUT_SIZE * INPUT_SIZE];
+     bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
+     int pixel = 0;
+     for (int i = 0; i < INPUT_SIZE; ++i) {
+       for (int j = 0; j < INPUT_SIZE; ++j) {
+         final int val = intValues[pixel++];
+         byteBuffer.putFloat(((val >> 16) & 0xFF) / 255.0f);
+         byteBuffer.putFloat(((val >> 8) & 0xFF) / 255.0f);
+         byteBuffer.putFloat((val & 0xFF) / 255.0f);
+       }
+     }
+     return byteBuffer;
+   }
+
+   // private ArrayList<Recognition> getDetections(ByteBuffer byteBuffer, Bitmap bitmap) {
+   //   ArrayList<Recognition> detections = new ArrayList<Recognition>();
+   //   Map<Integer, Object> outputMap = new HashMap<>();
+   //   for (int i = 0; i < OUTPUT_WIDTH.length; i++) {
+   //     float[][][][][] out = new float[1][OUTPUT_WIDTH[i]][OUTPUT_WIDTH[i]][3][5 + labels.size()];
+   //     outputMap.put(i, out);
+   //   }
+   //
+   //   Log.d("YoloV4Classifier", "mObjThresh: " + getObjThresh());
+   //
+   //   Object[] inputArray = {byteBuffer};
+   //   tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
+   //
+   //   for (int i = 0; i < OUTPUT_WIDTH.length; i++) {
+   //     int gridWidth = OUTPUT_WIDTH[i];
+   //     float[][][][][] out = (float[][][][][]) outputMap.get(i);
+   //
+   //     Log.d("YoloV4Classifier", "out[" + i + "] detect start");
+   //     for (int y = 0; y < gridWidth; ++y) {
+   //       for (int x = 0; x < gridWidth; ++x) {
+   //         for (int b = 0; b < NUM_BOXES_PER_BLOCK; ++b) {
+   //           final int offset =
+   //               (gridWidth * (NUM_BOXES_PER_BLOCK * (labels.size() + 5))) * y
+   //                   + (NUM_BOXES_PER_BLOCK * (labels.size() + 5)) * x
+   //                   + (labels.size() + 5) * b;
+   //
+   //           final float confidence = expit(out[0][y][x][b][4]);
+   //           int detectedClass = -1;
+   //           float maxClass = 0;
+   //
+   //           final float[] classes = new float[labels.size()];
+   //           for (int c = 0; c < labels.size(); ++c) {
+   //             classes[c] = out[0][y][x][b][5 + c];
+   //           }
+   //
+   //           for (int c = 0; c < labels.size(); ++c) {
+   //             if (classes[c] > maxClass) {
+   //               detectedClass = c;
+   //               maxClass = classes[c];
+   //             }
+   //           }
+   //
+   //           final float confidenceInClass = maxClass * confidence;
+   //           if (confidenceInClass > getObjThresh()) {
+   ////             final float xPos = (x + (expit(out[0][y][x][b][0]) * XYSCALE[i]) - (0.5f * (XYSCALE[i] - 1))) * (INPUT_SIZE / gridWidth);
+   ////             final float yPos = (y + (expit(out[0][y][x][b][1]) * XYSCALE[i]) - (0.5f * (XYSCALE[i] - 1))) * (INPUT_SIZE / gridWidth);
+   //
+   //             final float xPos = (x + expit(out[0][y][x][b][0])) * (1.0f * INPUT_SIZE / gridWidth);
+   //             final float yPos = (y + expit(out[0][y][x][b][1])) * (1.0f * INPUT_SIZE / gridWidth);
+   //
+   //             final float w = (float) (Math.exp(out[0][y][x][b][2]) * ANCHORS[2 * MASKS[i][b]]);
+   //             final float h = (float) (Math.exp(out[0][y][x][b][3]) * ANCHORS[2 * MASKS[i][b] + 1]);
+   //
+   //             final RectF rect =
+   //                 new RectF(
+   //                     Math.max(0, xPos - w / 2),
+   //                     Math.max(0, yPos - h / 2),
+   //                     Math.min(bitmap.getWidth() - 1, xPos + w / 2),
+   //                     Math.min(bitmap.getHeight() - 1, yPos + h / 2));
+   //             detections.add(new Recognition("" + offset, labels.get(detectedClass),
+   //                 confidenceInClass, rect, detectedClass));
+   //           }
+   //         }
+   //       }
+   //     }
+   //     Log.d("YoloV4Classifier", "out[" + i + "] detect end");
+   //   }
+   //   return detections;
+   // }
+
+   /**
+    * The converted yolov4 models only have two outputs, both three-dimensional: the first is a
+    * tensor of shape [1, num_boxes, 4] containing all the bounding boxes, and the second is a
+    * tensor of shape [1, num_boxes, class_num] containing all the class scores (num_boxes is
+    * 10647 for the full model and 2535 for yolov4-tiny).
+    *
+    * @param byteBuffer input ByteBuffer, which contains the image information
+    * @param bitmap source bitmap whose dimensions are used to clamp the output boxes
+    * @return an array list containing the recognitions
+    */
+   private ArrayList<Recognition> getDetectionsForFull(ByteBuffer byteBuffer, Bitmap bitmap) {
+     ArrayList<Recognition> detections = new ArrayList<Recognition>();
+     Map<Integer, Object> outputMap = new HashMap<>();
+     outputMap.put(0, new float[1][OUTPUT_WIDTH_FULL[0]][4]);
+     outputMap.put(1, new float[1][OUTPUT_WIDTH_FULL[1]][labels.size()]);
+     Object[] inputArray = {byteBuffer};
+     tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
+
+     int gridWidth = OUTPUT_WIDTH_FULL[0];
+     float[][][] bboxes = (float[][][]) outputMap.get(0);
+     float[][][] out_score = (float[][][]) outputMap.get(1);
+
+     for (int i = 0; i < gridWidth; i++) {
+       float maxClass = 0;
+       int detectedClass = -1;
+       final float[] classes = new float[labels.size()];
+       for (int c = 0; c < labels.size(); c++) {
+         classes[c] = out_score[0][i][c];
+       }
+       for (int c = 0; c < labels.size(); ++c) {
+         if (classes[c] > maxClass) {
+           detectedClass = c;
+           maxClass = classes[c];
+         }
+       }
+       final float score = maxClass;
+       if (score > getObjThresh()) {
+         final float xPos = bboxes[0][i][0];
+         final float yPos = bboxes[0][i][1];
+         final float w = bboxes[0][i][2];
+         final float h = bboxes[0][i][3];
+         final RectF rectF = new RectF(
+             Math.max(0, xPos - w / 2),
+             Math.max(0, yPos - h / 2),
+             Math.min(bitmap.getWidth() - 1, xPos + w / 2),
+             Math.min(bitmap.getHeight() - 1, yPos + h / 2));
+         detections.add(new Recognition("" + i, labels.get(detectedClass), score, rectF, detectedClass));
+       }
+     }
+     return detections;
+   }
+
+   private ArrayList<Recognition> getDetectionsForTiny(ByteBuffer byteBuffer, Bitmap bitmap) {
+     ArrayList<Recognition> detections = new ArrayList<Recognition>();
+     Map<Integer, Object> outputMap = new HashMap<>();
+     outputMap.put(0, new float[1][OUTPUT_WIDTH_TINY[0]][4]);
+     outputMap.put(1, new float[1][OUTPUT_WIDTH_TINY[1]][labels.size()]);
+     Object[] inputArray = {byteBuffer};
+     tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
+
+     int gridWidth = OUTPUT_WIDTH_TINY[0];
+     float[][][] bboxes = (float[][][]) outputMap.get(0);
+     float[][][] out_score = (float[][][]) outputMap.get(1);
+
+     for (int i = 0; i < gridWidth; i++) {
+       float maxClass = 0;
+       int detectedClass = -1;
+       final float[] classes = new float[labels.size()];
+       for (int c = 0; c < labels.size(); c++) {
+         classes[c] = out_score[0][i][c];
+       }
+       for (int c = 0; c < labels.size(); ++c) {
+         if (classes[c] > maxClass) {
+           detectedClass = c;
+           maxClass = classes[c];
+         }
+       }
+       final float score = maxClass;
+       if (score > getObjThresh()) {
+         final float xPos = bboxes[0][i][0];
+         final float yPos = bboxes[0][i][1];
+         final float w = bboxes[0][i][2];
+         final float h = bboxes[0][i][3];
+         final RectF rectF = new RectF(
+             Math.max(0, xPos - w / 2),
+             Math.max(0, yPos - h / 2),
+             Math.min(bitmap.getWidth() - 1, xPos + w / 2),
+             Math.min(bitmap.getHeight() - 1, yPos + h / 2));
+         detections.add(new Recognition("" + i, labels.get(detectedClass), score, rectF, detectedClass));
+       }
+     }
+     return detections;
+   }
+
+   public ArrayList<Recognition> recognizeImage(Bitmap bitmap) {
+     ByteBuffer byteBuffer = convertBitmapToByteBuffer(bitmap);
+
+     ArrayList<Recognition> detections;
+     if (isTiny) {
+       detections = getDetectionsForTiny(byteBuffer, bitmap);
+     } else {
+       detections = getDetectionsForFull(byteBuffer, bitmap);
+     }
+     final ArrayList<Recognition> recognitions = nms(detections);
+     return recognitions;
+   }
+
+   public boolean checkInvalidateBox(float x, float y, float width, float height, float oriW, float oriH, int inputSize) {
+     // (1) (x, y, w, h) --> (xmin, ymin, xmax, ymax)
+     float halfHeight = height / 2.0f;
+     float halfWidth = width / 2.0f;
+
+     float[] pred_coor = new float[]{x - halfWidth, y - halfHeight, x + halfWidth, y + halfHeight};
+
+     // (2) (xmin, ymin, xmax, ymax) -> (xmin_org, ymin_org, xmax_org, ymax_org)
+     float resize_ratioW = 1.0f * inputSize / oriW;
+     float resize_ratioH = 1.0f * inputSize / oriH;
+
+     float resize_ratio = resize_ratioW > resize_ratioH ? resize_ratioH : resize_ratioW; // min
+
+     float dw = (inputSize - resize_ratio * oriW) / 2;
+     float dh = (inputSize - resize_ratio * oriH) / 2;
+
+     pred_coor[0] = 1.0f * (pred_coor[0] - dw) / resize_ratio;
+     pred_coor[2] = 1.0f * (pred_coor[2] - dw) / resize_ratio;
+
+     pred_coor[1] = 1.0f * (pred_coor[1] - dh) / resize_ratio;
+     pred_coor[3] = 1.0f * (pred_coor[3] - dh) / resize_ratio;
+
+     // (3) clip boxes that are out of range
+     pred_coor[0] = pred_coor[0] > 0 ? pred_coor[0] : 0;
+     pred_coor[1] = pred_coor[1] > 0 ? pred_coor[1] : 0;
+
+     pred_coor[2] = pred_coor[2] < (oriW - 1) ? pred_coor[2] : (oriW - 1);
+     pred_coor[3] = pred_coor[3] < (oriH - 1) ? pred_coor[3] : (oriH - 1);
+
+     if ((pred_coor[0] > pred_coor[2]) || (pred_coor[1] > pred_coor[3])) {
+       pred_coor[0] = 0;
+       pred_coor[1] = 0;
+       pred_coor[2] = 0;
+       pred_coor[3] = 0;
+     }
+
+     // (4) discard invalid boxes
+     float temp1 = pred_coor[2] - pred_coor[0];
+     float temp2 = pred_coor[3] - pred_coor[1];
+     float temp = temp1 * temp2;
+     if (temp < 0) {
+       Log.e("checkInvalidateBox", "temp < 0");
+       return false;
+     }
+     if (Math.sqrt(temp) > Float.MAX_VALUE) {
+       Log.e("checkInvalidateBox", "temp max");
+       return false;
+     }
+
+     return true;
+   }
+ }
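The NMS loop above keeps a candidate only while its IoU with the current best box stays below mNmsThresh (0.6). A small worked example, inside the classifier (box_iou is an instance method), with hand-picked boxes:

// Two 100x100 boxes offset by 50 px horizontally:
// intersection = 50 * 100 = 5000, union = 10000 + 10000 - 5000 = 15000,
// so IoU = 5000 / 15000 ≈ 0.33, which is below 0.6, and both boxes survive NMS.
RectF a = new RectF(0, 0, 100, 100);
RectF b = new RectF(50, 0, 150, 100);
float iou = box_iou(a, b); // ≈ 0.33f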
android/app/src/main/java/org/tensorflow/lite/examples/detection/tracking/MultiBoxTracker.java ADDED
@@ -0,0 +1,211 @@
+ /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ ==============================================================================*/
+
+ package org.tensorflow.lite.examples.detection.tracking;
+
+ import android.content.Context;
+ import android.graphics.Canvas;
+ import android.graphics.Color;
+ import android.graphics.Matrix;
+ import android.graphics.Paint;
+ import android.graphics.Paint.Cap;
+ import android.graphics.Paint.Join;
+ import android.graphics.Paint.Style;
+ import android.graphics.RectF;
+ import android.text.TextUtils;
+ import android.util.Pair;
+ import android.util.TypedValue;
+ import java.util.LinkedList;
+ import java.util.List;
+ import java.util.Queue;
+ import org.tensorflow.lite.examples.detection.env.BorderedText;
+ import org.tensorflow.lite.examples.detection.env.ImageUtils;
+ import org.tensorflow.lite.examples.detection.env.Logger;
+ import org.tensorflow.lite.examples.detection.tflite.Classifier.Recognition;
+
+ /** A tracker that handles non-max suppression and matches existing objects to new detections. */
+ public class MultiBoxTracker {
+   private static final float TEXT_SIZE_DIP = 18;
+   private static final float MIN_SIZE = 16.0f;
+   private static final int[] COLORS = {
+     Color.BLUE,
+     Color.RED,
+     Color.GREEN,
+     Color.YELLOW,
+     Color.CYAN,
+     Color.MAGENTA,
+     Color.WHITE,
+     Color.parseColor("#55FF55"),
+     Color.parseColor("#FFA500"),
+     Color.parseColor("#FF8888"),
+     Color.parseColor("#AAAAFF"),
+     Color.parseColor("#FFFFAA"),
+     Color.parseColor("#55AAAA"),
+     Color.parseColor("#AA33AA"),
+     Color.parseColor("#0D0068")
+   };
+   final List<Pair<Float, RectF>> screenRects = new LinkedList<Pair<Float, RectF>>();
+   private final Logger logger = new Logger();
+   private final Queue<Integer> availableColors = new LinkedList<Integer>();
+   private final List<TrackedRecognition> trackedObjects = new LinkedList<TrackedRecognition>();
+   private final Paint boxPaint = new Paint();
+   private final float textSizePx;
+   private final BorderedText borderedText;
+   private Matrix frameToCanvasMatrix;
+   private int frameWidth;
+   private int frameHeight;
+   private int sensorOrientation;
+
+   public MultiBoxTracker(final Context context) {
+     for (final int color : COLORS) {
+       availableColors.add(color);
+     }
+
+     boxPaint.setColor(Color.RED);
+     boxPaint.setStyle(Style.STROKE);
+     boxPaint.setStrokeWidth(10.0f);
+     boxPaint.setStrokeCap(Cap.ROUND);
+     boxPaint.setStrokeJoin(Join.ROUND);
+     boxPaint.setStrokeMiter(100);
+
+     textSizePx =
+         TypedValue.applyDimension(
+             TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, context.getResources().getDisplayMetrics());
+     borderedText = new BorderedText(textSizePx);
+   }
+
+   public synchronized void setFrameConfiguration(
+       final int width, final int height, final int sensorOrientation) {
+     frameWidth = width;
+     frameHeight = height;
+     this.sensorOrientation = sensorOrientation;
+   }
+
+   public synchronized void drawDebug(final Canvas canvas) {
+     final Paint textPaint = new Paint();
+     textPaint.setColor(Color.WHITE);
+     textPaint.setTextSize(60.0f);
+
+     final Paint boxPaint = new Paint();
+     boxPaint.setColor(Color.RED);
+     boxPaint.setAlpha(200);
+     boxPaint.setStyle(Style.STROKE);
+
+     for (final Pair<Float, RectF> detection : screenRects) {
+       final RectF rect = detection.second;
+       canvas.drawRect(rect, boxPaint);
+       canvas.drawText("" + detection.first, rect.left, rect.top, textPaint);
+       borderedText.drawText(canvas, rect.centerX(), rect.centerY(), "" + detection.first);
+     }
+   }
+
+   public synchronized void trackResults(final List<Recognition> results, final long timestamp) {
+     logger.i("Processing %d results from %d", results.size(), timestamp);
+     processResults(results);
+   }
+
+   private Matrix getFrameToCanvasMatrix() {
+     return frameToCanvasMatrix;
+   }
+
+   public synchronized void draw(final Canvas canvas) {
+     final boolean rotated = sensorOrientation % 180 == 90;
+     final float multiplier =
+         Math.min(
+             canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
+             canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth));
+     frameToCanvasMatrix =
+         ImageUtils.getTransformationMatrix(
+             frameWidth,
+             frameHeight,
+             (int) (multiplier * (rotated ? frameHeight : frameWidth)),
+             (int) (multiplier * (rotated ? frameWidth : frameHeight)),
+             sensorOrientation,
+             false);
+     for (final TrackedRecognition recognition : trackedObjects) {
+       final RectF trackedPos = new RectF(recognition.location);
+
+       getFrameToCanvasMatrix().mapRect(trackedPos);
+       boxPaint.setColor(recognition.color);
+
+       float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f;
+       canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint);
+
+       final String labelString =
+           !TextUtils.isEmpty(recognition.title)
+               ? String.format("%s %.2f", recognition.title, (100 * recognition.detectionConfidence))
+               : String.format("%.2f", (100 * recognition.detectionConfidence));
+       // borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top,
+       //     labelString);
+       borderedText.drawText(
+           canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint);
+     }
+   }
+
+   private void processResults(final List<Recognition> results) {
+     final List<Pair<Float, Recognition>> rectsToTrack = new LinkedList<Pair<Float, Recognition>>();
+
+     screenRects.clear();
+     final Matrix rgbFrameToScreen = new Matrix(getFrameToCanvasMatrix());
+
+     for (final Recognition result : results) {
+       if (result.getLocation() == null) {
+         continue;
+       }
+       final RectF detectionFrameRect = new RectF(result.getLocation());
+
+       final RectF detectionScreenRect = new RectF();
+       rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect);
+
+       logger.v(
+           "Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);
+
+       screenRects.add(new Pair<Float, RectF>(result.getConfidence(), detectionScreenRect));
+
+       if (detectionFrameRect.width() < MIN_SIZE || detectionFrameRect.height() < MIN_SIZE) {
+         logger.w("Degenerate rectangle! " + detectionFrameRect);
+         continue;
+       }
+
+       rectsToTrack.add(new Pair<Float, Recognition>(result.getConfidence(), result));
+     }
+
+     trackedObjects.clear();
+     if (rectsToTrack.isEmpty()) {
+       logger.v("Nothing to track, aborting.");
+       return;
+     }
+
+     for (final Pair<Float, Recognition> potential : rectsToTrack) {
+       final TrackedRecognition trackedRecognition = new TrackedRecognition();
+       trackedRecognition.detectionConfidence = potential.first;
+       trackedRecognition.location = new RectF(potential.second.getLocation());
+       trackedRecognition.title = potential.second.getTitle();
+       trackedRecognition.color = COLORS[trackedObjects.size()];
+       trackedObjects.add(trackedRecognition);
+
+       if (trackedObjects.size() >= COLORS.length) {
+         break;
+       }
+     }
+   }
+
+   private static class TrackedRecognition {
+     RectF location;
+     float detectionConfidence;
+     int color;
+     String title;
+   }
+ }
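draw() scales the frame into the canvas with a single min() multiplier, so boxes keep their aspect ratio regardless of orientation. A quick numeric check, with sizes invented for illustration:

// Hypothetical sizes: 1080x1920 portrait canvas, 640x480 frame, 90° sensor.
final boolean rotated = 90 % 180 == 90;                      // true
final float multiplier = Math.min(1920 / 640f, 1080 / 480f); // 2.25
// Destination is 1080x1440 (multiplier * frameHeight by multiplier * frameWidth),
// so the frame fits the canvas width and detections scale uniformly.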
android/app/src/main/res/drawable-hdpi/ic_launcher.png ADDED
android/app/src/main/res/drawable-mdpi/ic_launcher.png ADDED
android/app/src/main/res/drawable-v24/ic_launcher_foreground.xml ADDED
@@ -0,0 +1,34 @@
+ <vector xmlns:android="http://schemas.android.com/apk/res/android"
+     xmlns:aapt="http://schemas.android.com/aapt"
+     android:width="108dp"
+     android:height="108dp"
+     android:viewportHeight="108"
+     android:viewportWidth="108">
+   <path
+       android:fillType="evenOdd"
+       android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
+       android:strokeColor="#00000000"
+       android:strokeWidth="1">
+     <aapt:attr name="android:fillColor">
+       <gradient
+           android:endX="78.5885"
+           android:endY="90.9159"
+           android:startX="48.7653"
+           android:startY="61.0927"
+           android:type="linear">
+         <item
+             android:color="#44000000"
+             android:offset="0.0"/>
+         <item
+             android:color="#00000000"
+             android:offset="1.0"/>
+       </gradient>
+     </aapt:attr>
+   </path>
+   <path
+       android:fillColor="#FFFFFF"
+       android:fillType="nonZero"
+       android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
+       android:strokeColor="#00000000"
+       android:strokeWidth="1"/>
+ </vector>
android/app/src/main/res/drawable-v24/kite.jpg ADDED
android/app/src/main/res/drawable-xxhdpi/ic_launcher.png ADDED
android/app/src/main/res/drawable-xxhdpi/icn_chevron_down.png ADDED
android/app/src/main/res/drawable-xxhdpi/icn_chevron_up.png ADDED
android/app/src/main/res/drawable-xxhdpi/tfl2_logo.png ADDED
android/app/src/main/res/drawable-xxhdpi/tfl2_logo_dark.png ADDED
android/app/src/main/res/drawable-xxxhdpi/caret.jpg ADDED
android/app/src/main/res/drawable-xxxhdpi/chair.jpg ADDED
android/app/src/main/res/drawable-xxxhdpi/sample_image.jpg ADDED
android/app/src/main/res/drawable/bottom_sheet_bg.xml ADDED
@@ -0,0 +1,9 @@
+ <?xml version="1.0" encoding="utf-8"?>
+ <shape xmlns:android="http://schemas.android.com/apk/res/android"
+     android:shape="rectangle">
+   <corners
+       android:topLeftRadius="@dimen/tfe_bottom_sheet_corner_radius"
+       android:topRightRadius="@dimen/tfe_bottom_sheet_corner_radius" />
+   <padding android:top="@dimen/tfe_bottom_sheet_top_padding" />
+   <solid android:color="@android:color/white" />
+ </shape>
android/app/src/main/res/drawable/ic_baseline_add.xml ADDED
@@ -0,0 +1,9 @@
+ <vector xmlns:android="http://schemas.android.com/apk/res/android"
+     android:width="24dp"
+     android:height="24dp"
+     android:viewportWidth="24"
+     android:viewportHeight="24">
+   <path
+       android:fillColor="#FF000000"
+       android:pathData="M19,13h-6v6h-2v-6H5v-2h6V5h2v6h6v2z"/>
+ </vector>
android/app/src/main/res/drawable/ic_baseline_remove.xml ADDED
@@ -0,0 +1,9 @@
+ <vector xmlns:android="http://schemas.android.com/apk/res/android"
+     android:width="24dp"
+     android:height="24dp"
+     android:viewportWidth="24"
+     android:viewportHeight="24">
+   <path
+       android:fillColor="#FF000000"
+       android:pathData="M19,13H5v-2h14v2z"/>
+ </vector>
android/app/src/main/res/drawable/ic_launcher_background.xml ADDED
@@ -0,0 +1,170 @@
+ <?xml version="1.0" encoding="utf-8"?>
+ <vector xmlns:android="http://schemas.android.com/apk/res/android"
+     android:width="108dp"
+     android:height="108dp"
+     android:viewportHeight="108"
+     android:viewportWidth="108">
+   <path
+       android:fillColor="#26A69A"
+       android:pathData="M0,0h108v108h-108z" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M9,0L9,108"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M19,0L19,108"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M29,0L29,108"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M39,0L39,108"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M49,0L49,108"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M59,0L59,108"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M69,0L69,108"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M79,0L79,108"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M89,0L89,108"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M99,0L99,108"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M0,9L108,9"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M0,19L108,19"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M0,29L108,29"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M0,39L108,39"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M0,49L108,49"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M0,59L108,59"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M0,69L108,69"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M0,79L108,79"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M0,89L108,89"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M0,99L108,99"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M19,29L89,29"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M19,39L89,39"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M19,49L89,49"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M19,59L89,59"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M19,69L89,69"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M19,79L89,79"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M29,19L29,89"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M39,19L39,89"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
+       android:pathData="M49,19L49,89"
+       android:strokeColor="#33FFFFFF"
+       android:strokeWidth="0.8" />
+   <path
+       android:fillColor="#00000000"
157
+ android:pathData="M59,19L59,89"
158
+ android:strokeColor="#33FFFFFF"
159
+ android:strokeWidth="0.8" />
160
+ <path
161
+ android:fillColor="#00000000"
162
+ android:pathData="M69,19L69,89"
163
+ android:strokeColor="#33FFFFFF"
164
+ android:strokeWidth="0.8" />
165
+ <path
166
+ android:fillColor="#00000000"
167
+ android:pathData="M79,19L79,89"
168
+ android:strokeColor="#33FFFFFF"
169
+ android:strokeWidth="0.8" />
170
+ </vector>
android/app/src/main/res/drawable/rectangle.xml ADDED
@@ -0,0 +1,13 @@
+ <?xml version="1.0" encoding="UTF-8"?>
+ <shape xmlns:android="http://schemas.android.com/apk/res/android"
+     android:id="@+id/listview_background_shape">
+     <stroke
+         android:width="1dp"
+         android:color="@android:color/darker_gray" />
+     <padding
+         android:bottom="2dp"
+         android:left="2dp"
+         android:right="2dp"
+         android:top="2dp" />
+     <solid android:color="#ffffffff" />
+ </shape>
android/app/src/main/res/layout/activity_main.xml ADDED
@@ -0,0 +1,52 @@
+ <?xml version="1.0" encoding="utf-8"?>
+ <androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
+     xmlns:app="http://schemas.android.com/apk/res-auto"
+     xmlns:tools="http://schemas.android.com/tools"
+     android:layout_width="match_parent"
+     android:layout_height="match_parent"
+     tools:context=".MainActivity">
+
+
+     <Button
+         android:id="@+id/cameraButton"
+         android:layout_width="wrap_content"
+         android:layout_height="wrap_content"
+         android:text="Camera"
+         app:layout_constraintBottom_toBottomOf="@+id/detectButton"
+         app:layout_constraintEnd_toEndOf="parent"
+         app:layout_constraintHorizontal_bias="0.655"
+         app:layout_constraintStart_toEndOf="@+id/detectButton"
+         app:layout_constraintTop_toTopOf="@+id/detectButton"
+         app:layout_constraintVertical_bias="0.0" />
+
+     <Button
+         android:id="@+id/detectButton"
+         android:layout_width="wrap_content"
+         android:layout_height="wrap_content"
+         android:layout_marginStart="80dp"
+         android:layout_marginBottom="126dp"
+         android:text="Detect"
+         app:layout_constraintBottom_toBottomOf="parent"
+         app:layout_constraintStart_toStartOf="parent"
+         app:layout_constraintTop_toBottomOf="@+id/imageView" />
+
+     <ImageView
+         android:id="@+id/imageView"
+         android:layout_width="416dp"
+         android:layout_height="416dp"
+         android:scaleType="fitStart"
+         app:layout_constraintBottom_toTopOf="@+id/detectButton"
+         app:layout_constraintEnd_toEndOf="parent"
+         app:layout_constraintStart_toStartOf="parent"
+         app:layout_constraintTop_toTopOf="parent"
+         tools:srcCompat="@drawable/kite" />
+
+     <org.tensorflow.lite.examples.detection.customview.OverlayView
+         android:id="@+id/tracking_overlay"
+         android:layout_width="416dp"
+         android:layout_height="416dp"
+         app:layout_constraintEnd_toEndOf="parent"
+         app:layout_constraintStart_toStartOf="parent"
+         app:layout_constraintTop_toTopOf="parent" />
+
+ </androidx.constraintlayout.widget.ConstraintLayout>
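
Note: a minimal sketch of how the views declared above are typically wired; the actual MainActivity.java shipped in this commit (see the file list) may differ, and the detectButton body here is a placeholder assumption:

    import android.content.Intent;
    import android.os.Bundle;
    import android.widget.Button;
    import android.widget.ImageView;
    import androidx.appcompat.app.AppCompatActivity;

    public class MainActivityDemo extends AppCompatActivity {
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            setContentView(R.layout.activity_main);

            ImageView imageView = findViewById(R.id.imageView);   // 416x416 matches the model input
            Button detectButton = findViewById(R.id.detectButton);
            Button cameraButton = findViewById(R.id.cameraButton);

            // Launch the live-camera detector (DetectorActivity ships in this commit).
            cameraButton.setOnClickListener(v ->
                    startActivity(new Intent(this, DetectorActivity.class)));

            // Run detection on the still image shown in imageView (placeholder).
            detectButton.setOnClickListener(v -> {
                // e.g. hand imageView's bitmap to the YoloV4Classifier (assumption)
            });
        }
    }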
android/app/src/main/res/layout/tfe_od_activity_camera.xml ADDED
@@ -0,0 +1,56 @@
+ <?xml version="1.0" encoding="utf-8"?><!--
+  Copyright 2019 The TensorFlow Authors. All Rights Reserved.
+
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+
+ <androidx.coordinatorlayout.widget.CoordinatorLayout xmlns:android="http://schemas.android.com/apk/res/android"
+     xmlns:tools="http://schemas.android.com/tools"
+     android:layout_width="match_parent"
+     android:layout_height="match_parent"
+     android:background="#00000000">
+
+     <RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
+         xmlns:tools="http://schemas.android.com/tools"
+         android:layout_width="match_parent"
+         android:layout_height="match_parent"
+         android:background="@android:color/black"
+         android:orientation="vertical">
+
+
+         <FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+             xmlns:tools="http://schemas.android.com/tools"
+             android:id="@+id/container"
+             android:layout_width="match_parent"
+             android:layout_height="match_parent"
+             tools:context="org.tensorflow.demo.CameraActivity" />
+
+         <androidx.appcompat.widget.Toolbar
+             android:id="@+id/toolbar"
+             android:layout_width="match_parent"
+             android:layout_height="?attr/actionBarSize"
+             android:layout_alignParentTop="true"
+             android:background="@color/tfe_semi_transparent">
+
+             <ImageView
+                 android:layout_width="wrap_content"
+                 android:layout_height="wrap_content"
+                 android:src="@drawable/tfl2_logo" />
+         </androidx.appcompat.widget.Toolbar>
+
+     </RelativeLayout>
+
+     <include
+         android:id="@+id/bottom_sheet_layout"
+         layout="@layout/tfe_od_layout_bottom_sheet" />
+ </androidx.coordinatorlayout.widget.CoordinatorLayout>
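
Note: a hedged sketch of how the @+id/container FrameLayout above receives the camera-preview fragment. The TFLite example's CameraActivity uses the framework FragmentManager this way (deprecated but still available), though the bare Fragment here is a stand-in:

    import android.app.Activity;
    import android.app.Fragment;
    import android.os.Bundle;

    public class CameraHostDemo extends Activity {
        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            setContentView(R.layout.tfe_od_activity_camera);
            // Real code builds a CameraConnectionFragment / LegacyCameraConnectionFragment
            // (both ship in this commit); their constructors are not shown in this diff.
            Fragment cameraFragment = new Fragment();
            getFragmentManager()
                    .beginTransaction()
                    .replace(R.id.container, cameraFragment)  // FrameLayout declared above
                    .commit();
        }
    }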