Commit 2756155a by wanglei

format

1 parent e08d779b
Showing with 109 additions and 786 deletions
@@ -14,11 +14,9 @@
     <uses-permission android:name="android.permission.INTERNET"/>
     <application
         android:allowBackup="true"
-        android:icon="@mipmap/ic_launcher"
+        android:icon="@drawable/ic_launcher"
         android:label="@string/tfe_od_app_name"
-        android:roundIcon="@mipmap/ic_launcher_round"
         android:supportsRtl="true"
-        android:theme="@style/AppTheme.ObjectDetection"
         android:hardwareAccelerated="true"
         android:installLocation="internalOnly">
...
@@ -56,14 +56,12 @@ public class LegacyCameraConnectionFragment extends Fragment {
   /** An {@link AutoFitTextureView} for camera preview. */
   private AutoFitTextureView textureView;

   /**
-   * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
-   * TextureView}.
+   * {@link TextureView.SurfaceTextureListener} handles several lifecycle events
+   * on a {@link TextureView}.
    */
-  private final TextureView.SurfaceTextureListener surfaceTextureListener =
-      new TextureView.SurfaceTextureListener() {
+  private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
         @Override
-        public void onSurfaceTextureAvailable(
-            final SurfaceTexture texture, final int width, final int height) {
+        public void onSurfaceTextureAvailable(final SurfaceTexture texture, final int width, final int height) {
           int index = getCameraId();
           camera = Camera.open(index);
@@ -71,8 +69,7 @@ public class LegacyCameraConnectionFragment extends Fragment {
           try {
             Camera.Parameters parameters = camera.getParameters();
            List<String> focusModes = parameters.getSupportedFocusModes();
-            if (focusModes != null
-                && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
+            if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
              parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
            }
            List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
@@ -81,9 +78,8 @@ public class LegacyCameraConnectionFragment extends Fragment {
            for (Camera.Size size : cameraSizes) {
              sizes[i++] = new Size(size.width, size.height);
            }
-            Size previewSize =
-                CameraConnectionFragment.chooseOptimalSize(
-                    sizes, desiredSize.getWidth(), desiredSize.getHeight());
+            Size previewSize = CameraConnectionFragment.chooseOptimalSize(sizes, desiredSize.getWidth(),
+                desiredSize.getHeight());
            parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
            camera.setDisplayOrientation(90);
            camera.setParameters(parameters);
@@ -102,8 +98,8 @@ public class LegacyCameraConnectionFragment extends Fragment {
        }

        @Override
-        public void onSurfaceTextureSizeChanged(
-            final SurfaceTexture texture, final int width, final int height) {}
+        public void onSurfaceTextureSizeChanged(final SurfaceTexture texture, final int width, final int height) {
+        }

        @Override
        public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
@@ -111,21 +107,22 @@ public class LegacyCameraConnectionFragment extends Fragment {
        }

        @Override
-        public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
+        public void onSurfaceTextureUpdated(final SurfaceTexture texture) {
+        }
      };

  /** An additional thread for running tasks that shouldn't block the UI. */
  private HandlerThread backgroundThread;

-  public LegacyCameraConnectionFragment(
-      final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
+  public LegacyCameraConnectionFragment(final Camera.PreviewCallback imageListener, final int layout,
+      final Size desiredSize) {
    this.imageListener = imageListener;
    this.layout = layout;
    this.desiredSize = desiredSize;
  }

  @Override
-  public View onCreateView(
-      final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
+  public View onCreateView(final LayoutInflater inflater, final ViewGroup container,
+      final Bundle savedInstanceState) {
    return inflater.inflate(layout, container, false);
  }
@@ -143,9 +140,12 @@ public class LegacyCameraConnectionFragment extends Fragment {
  public void onResume() {
    super.onResume();
    startBackgroundThread();
-    // When the screen is turned off and turned back on, the SurfaceTexture is already
-    // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
-    // a camera and start preview from here (otherwise, we wait until the surface is ready in
+    // When the screen is turned off and turned back on, the SurfaceTexture is
+    // already
+    // available, and "onSurfaceTextureAvailable" will not be called. In that case,
+    // we can open
+    // a camera and start preview from here (otherwise, we wait until the surface is
+    // ready in
    // the SurfaceTextureListener).
    if (textureView.isAvailable()) {
@@ -192,7 +192,8 @@ public class LegacyCameraConnectionFragment extends Fragment {
    CameraInfo ci = new CameraInfo();
    for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
      Camera.getCameraInfo(i, ci);
-      if (ci.facing == CameraInfo.CAMERA_FACING_BACK) return i;
+      if (ci.facing == CameraInfo.CAMERA_FACING_BACK)
+        return i;
    }
    return -1; // No camera found
  }
...
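
The preview-size selection above delegates to CameraConnectionFragment.chooseOptimalSize, whose body is not part of this diff. As a rough illustration only (not this repository's actual implementation), a chooser in that spirit picks the smallest supported size that still covers the desired dimensions and falls back to the largest one otherwise; SizeChooser and its method below are hypothetical names.

import android.util.Size;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

final class SizeChooser {
  // Hypothetical sketch; the real chooseOptimalSize may weigh aspect ratio and size caps differently.
  static Size chooseOptimalSize(Size[] choices, int desiredWidth, int desiredHeight) {
    Comparator<Size> byArea = Comparator.comparingLong(s -> (long) s.getWidth() * s.getHeight());
    List<Size> bigEnough = new ArrayList<>();
    for (Size option : choices) {
      if (option.getWidth() >= desiredWidth && option.getHeight() >= desiredHeight) {
        bigEnough.add(option);  // candidate large enough for the requested preview
      }
    }
    return bigEnough.isEmpty()
        ? Collections.max(Arrays.asList(choices), byArea)  // nothing big enough: take the largest
        : Collections.min(bigEnough, byArea);              // otherwise the smallest sufficient size
  }
}
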
@@ -47,8 +47,8 @@ import com.agenew.detection.tflite.YoloV5Classifier;
 import com.agenew.detection.tracking.MultiBoxTracker;

 /**
- * An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to detect and then track
- * objects.
+ * An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to
+ * detect and then track objects.
  */
 public class MainActivity extends CameraActivity implements OnImageAvailableListener {
   private static final Logger LOGGER = new Logger();
@@ -82,9 +82,8 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
   @Override
   public void onPreviewSizeChosen(final Size size, final int rotation) {
-    final float textSizePx =
-        TypedValue.applyDimension(
-            TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
+    final float textSizePx = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP,
+        getResources().getDisplayMetrics());
     borderedText = new BorderedText(textSizePx);
     borderedText.setTypeface(Typeface.MONOSPACE);
@@ -100,9 +99,8 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
     } catch (final IOException e) {
       e.printStackTrace();
       LOGGER.e(e, "Exception initializing classifier!");
-      Toast toast =
-          Toast.makeText(
-              getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
+      Toast toast = Toast.makeText(getApplicationContext(), "Classifier could not be initialized",
+          Toast.LENGTH_SHORT);
       toast.show();
       finish();
     }
@@ -125,18 +123,14 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
     rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
     croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);

-    frameToCropTransform =
-        ImageUtils.getTransformationMatrix(
-            previewWidth, previewHeight,
-            cropSize, cropSize,
-            sensorOrientation, MAINTAIN_ASPECT);
+    frameToCropTransform = ImageUtils.getTransformationMatrix(previewWidth, previewHeight, cropSize, cropSize,
+        sensorOrientation, MAINTAIN_ASPECT);

     cropToFrameTransform = new Matrix();
     frameToCropTransform.invert(cropToFrameTransform);

     trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay);
-    trackingOverlay.addCallback(
-        new DrawCallback() {
+    trackingOverlay.addCallback(new DrawCallback() {
       @Override
       public void drawCallback(final Canvas canvas) {
         tracker.draw(canvas);
@@ -157,8 +151,7 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
     final int numThreads = Integer.parseInt(threads);
     handler.post(() -> {
-      if (modelIndex == currentModel && deviceIndex == currentDevice
-          && numThreads == currentNumThreads) {
+      if (modelIndex == currentModel && deviceIndex == currentDevice && numThreads == currentNumThreads) {
         return;
       }
       currentModel = modelIndex;
@@ -185,18 +178,15 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
         if (detector == null) {
           return;
         }
-      }
-      catch(IOException e) {
+      } catch (IOException e) {
         e.printStackTrace();
         LOGGER.e(e, "Exception in updateActiveModel()");
-        Toast toast =
-            Toast.makeText(
-                getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
+        Toast toast = Toast.makeText(getApplicationContext(), "Classifier could not be initialized",
+            Toast.LENGTH_SHORT);
         toast.show();
         finish();
       }

       if (device.equals("CPU")) {
         detector.useCPU();
       } else if (device.equals("GPU")) {
@@ -209,11 +199,8 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
       int cropSize = detector.getInputSize();
       croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
-      frameToCropTransform =
-          ImageUtils.getTransformationMatrix(
-              previewWidth, previewHeight,
-              cropSize, cropSize,
-              sensorOrientation, MAINTAIN_ASPECT);
+      frameToCropTransform = ImageUtils.getTransformationMatrix(previewWidth, previewHeight, cropSize,
+          cropSize, sensorOrientation, MAINTAIN_ASPECT);
       cropToFrameTransform = new Matrix();
       frameToCropTransform.invert(cropToFrameTransform);
@@ -245,8 +232,7 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
       ImageUtils.saveBitmap(croppedBitmap);
     }

-    runInBackground(
-        new Runnable() {
+    runInBackground(new Runnable() {
       @Override
       public void run() {
         LOGGER.i("Running detection on image " + currTimestamp);
@@ -270,8 +256,7 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
             break;
         }

-        final List<Classifier.Recognition> mappedRecognitions =
-            new LinkedList<Classifier.Recognition>();
+        final List<Classifier.Recognition> mappedRecognitions = new LinkedList<>();

         for (final Classifier.Recognition result : results) {
           final RectF location = result.getLocation();
@@ -290,14 +275,10 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
         computingDetection = false;

-        runOnUiThread(
-            new Runnable() {
-              @Override
-              public void run() {
-                showFrameInfo(previewWidth + "x" + previewHeight);
-                showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight());
-                showInference(lastProcessingTimeMs + "ms");
-              }
-            });
+        runOnUiThread(() -> {
+          showFrameInfo(previewWidth + "x" + previewHeight);
+          showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight());
+          showInference(lastProcessingTimeMs + "ms");
+        });
       }
     });
@@ -313,18 +294,14 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
     return DESIRED_PREVIEW_SIZE;
   }

-  // Which detection model to use: by default uses Tensorflow Object Detection API frozen
+  // Which detection model to use: by default uses Tensorflow Object Detection API
+  // frozen
   // checkpoints.
   private enum DetectorMode {
     TF_OD_API;
   }

   @Override
-  protected void setUseNNAPI(final boolean isChecked) {
-    runInBackground(() -> detector.setUseNNAPI(isChecked));
-  }
-
-  @Override
   protected void setNumThreads(final int numThreads) {
     runInBackground(() -> detector.setNumThreads(numThreads));
   }
...
@@ -38,9 +38,10 @@ public class AutoFitTextureView extends TextureView {
   }

   /**
-   * Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
-   * calculated from the parameters. Note that the actual sizes of parameters don't matter, that is,
-   * calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
+   * Sets the aspect ratio for this view. The size of the view will be measured
+   * based on the ratio calculated from the parameters. Note that the actual sizes
+   * of parameters don't matter, that is, calling setAspectRatio(2, 3) and
+   * setAspectRatio(4, 6) make the same result.
    *
    * @param width  Relative horizontal size
    * @param height Relative vertical size
...
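
The setAspectRatio contract described above (only the width:height ratio matters) is typically enforced in onMeasure. The class's actual onMeasure is not shown in this diff; the stand-alone sketch below, under the hypothetical name RatioTextureView, shows one common way such a view honors the ratio.

import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;

/** Hypothetical stand-in; the project's AutoFitTextureView may differ in detail. */
public class RatioTextureView extends TextureView {
  private int ratioWidth = 0;
  private int ratioHeight = 0;

  public RatioTextureView(Context context, AttributeSet attrs) {
    super(context, attrs);
  }

  /** Only the ratio matters, so setAspectRatio(2, 3) behaves like setAspectRatio(4, 6). */
  public void setAspectRatio(int width, int height) {
    ratioWidth = width;
    ratioHeight = height;
    requestLayout();
  }

  @Override
  protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
    super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    int width = MeasureSpec.getSize(widthMeasureSpec);
    int height = MeasureSpec.getSize(heightMeasureSpec);
    if (ratioWidth == 0 || ratioHeight == 0) {
      setMeasuredDimension(width, height);                              // no ratio requested yet
    } else if (width < height * ratioWidth / ratioHeight) {
      setMeasuredDimension(width, width * ratioHeight / ratioWidth);    // width is the limiting side
    } else {
      setMeasuredDimension(height * ratioWidth / ratioHeight, height);  // height is the limiting side
    }
  }
}
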
@@ -18,22 +18,20 @@ package com.agenew.detection.env;
 import android.graphics.Canvas;
 import android.graphics.Color;
 import android.graphics.Paint;
-import android.graphics.Paint.Align;
 import android.graphics.Paint.Style;
-import android.graphics.Rect;
 import android.graphics.Typeface;
-import java.util.Vector;

-/** A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas. */
+/**
+ * A class that encapsulates the tedious bits of rendering legible, bordered
+ * text onto a canvas.
+ */
 public class BorderedText {
   private final Paint interiorPaint;
   private final Paint exteriorPaint;

-  private final float textSize;
-
   /**
-   * Creates a left-aligned bordered text object with a white interior, and a black exterior with
-   * the specified text size.
+   * Creates a left-aligned bordered text object with a white interior, and a
+   * black exterior with the specified text size.
    *
    * @param textSize text size in pixels
    */
@@ -42,8 +40,8 @@ public class BorderedText {
   }

   /**
-   * Create a bordered text object with the specified interior and exterior colors, text size and
-   * alignment.
+   * Create a bordered text object with the specified interior and exterior
+   * colors, text size and alignment.
    *
    * @param interiorColor the interior text color
    * @param exteriorColor the exterior text color
@@ -64,8 +62,6 @@ public class BorderedText {
     exteriorPaint.setStrokeWidth(textSize / 8);
     exteriorPaint.setAntiAlias(false);
     exteriorPaint.setAlpha(255);
-
-    this.textSize = textSize;
   }

   public void setTypeface(Typeface typeface) {
@@ -78,8 +74,7 @@ public class BorderedText {
     canvas.drawText(text, posX, posY, interiorPaint);
   }

-  public void drawText(
-      final Canvas canvas, final float posX, final float posY, final String text, Paint bgPaint) {
+  public void drawText(final Canvas canvas, final float posX, final float posY, final String text, Paint bgPaint) {
     float width = exteriorPaint.measureText(text);
     float textSize = exteriorPaint.getTextSize();
@@ -90,39 +85,4 @@ public class BorderedText {
     canvas.drawText(text, posX, (posY + textSize), interiorPaint);
   }
-
-  public void drawLines(Canvas canvas, final float posX, final float posY, Vector<String> lines) {
-    int lineNum = 0;
-    for (final String line : lines) {
-      drawText(canvas, posX, posY - getTextSize() * (lines.size() - lineNum - 1), line);
-      ++lineNum;
-    }
-  }
-
-  public void setInteriorColor(final int color) {
-    interiorPaint.setColor(color);
-  }
-
-  public void setExteriorColor(final int color) {
-    exteriorPaint.setColor(color);
-  }
-
-  public float getTextSize() {
-    return textSize;
-  }
-
-  public void setAlpha(final int alpha) {
-    interiorPaint.setAlpha(alpha);
-    exteriorPaint.setAlpha(alpha);
-  }
-
-  public void getTextBounds(
-      final String line, final int index, final int count, final Rect lineBounds) {
-    interiorPaint.getTextBounds(line, index, count, lineBounds);
-  }
-
-  public void setTextAlign(final Align align) {
-    interiorPaint.setTextAlign(align);
-    exteriorPaint.setTextAlign(align);
-  }
 }
@@ -23,22 +23,22 @@ import java.io.FileOutputStream;
 /** Utility class for manipulating images. */
 public class ImageUtils {
-  // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
+  // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their
+  // ranges
   // are normalized to eight bits.
   static final int kMaxChannelValue = 262143;

-  @SuppressWarnings("unused")
   private static final Logger LOGGER = new Logger();

   /**
-   * Utility method to compute the allocated size in bytes of a YUV420SP image of the given
-   * dimensions.
+   * Utility method to compute the allocated size in bytes of a YUV420SP image of
+   * the given dimensions.
    */
   public static int getYUVByteSize(final int width, final int height) {
     // The luminance plane requires 1 byte per pixel.
     final int ySize = width * height;

-    // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up.
+    // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded
+    // up.
     // Each 2x2 block takes 2 bytes to encode, one each for U and V.
     final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;
@@ -61,8 +61,7 @@ public class ImageUtils {
    * @param filename The location to save the bitmap to.
    */
   public static void saveBitmap(final Bitmap bitmap, final String filename) {
-    final String root =
-        Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
+    final String root = Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
     LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root);
     final File myDir = new File(root);
@@ -128,16 +127,8 @@ public class ImageUtils {
     return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
   }

-  public static void convertYUV420ToARGB8888(
-      byte[] yData,
-      byte[] uData,
-      byte[] vData,
-      int width,
-      int height,
-      int yRowStride,
-      int uvRowStride,
-      int uvPixelStride,
-      int[] out) {
+  public static void convertYUV420ToARGB8888(byte[] yData, byte[] uData, byte[] vData, int width, int height,
+      int yRowStride, int uvRowStride, int uvPixelStride, int[] out) {
     int yp = 0;
     for (int j = 0; j < height; j++) {
       int pY = yRowStride * j;
@@ -152,26 +143,21 @@ public class ImageUtils {
   }

   /**
-   * Returns a transformation matrix from one reference frame into another. Handles cropping (if
-   * maintaining aspect ratio is desired) and rotation.
+   * Returns a transformation matrix from one reference frame into another.
+   * Handles cropping (if maintaining aspect ratio is desired) and rotation.
    *
    * @param srcWidth Width of source frame.
    * @param srcHeight Height of source frame.
    * @param dstWidth Width of destination frame.
    * @param dstHeight Height of destination frame.
-   * @param applyRotation Amount of rotation to apply from one frame to another. Must be a multiple
-   *     of 90.
-   * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
-   *     cropping the image if necessary.
+   * @param applyRotation Amount of rotation to apply from one frame to
+   * another. Must be a multiple of 90.
+   * @param maintainAspectRatio If true, will ensure that scaling in x and y
+   * remains constant, cropping the image if necessary.
    * @return The transformation fulfilling the desired requirements.
    */
-  public static Matrix getTransformationMatrix(
-      final int srcWidth,
-      final int srcHeight,
-      final int dstWidth,
-      final int dstHeight,
-      final int applyRotation,
-      final boolean maintainAspectRatio) {
+  public static Matrix getTransformationMatrix(final int srcWidth, final int srcHeight, final int dstWidth,
+      final int dstHeight, final int applyRotation, final boolean maintainAspectRatio) {
     final Matrix matrix = new Matrix();

     if (applyRotation != 0) {
...
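
A quick numeric check of the YUV420SP size formula shown above (the final return of ySize + uvSize is implied by the two computed terms). Plain Java with example dimensions chosen here for illustration.

public final class YuvSizeExample {
  static int getYUVByteSize(final int width, final int height) {
    final int ySize = width * height;                               // 1 byte per luma pixel
    final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;  // 2 bytes per 2x2 chroma block
    return ySize + uvSize;
  }

  public static void main(String[] args) {
    // 640x480: 307200 luma bytes + 153600 chroma bytes = 460800, i.e. 1.5 bytes per pixel.
    System.out.println(getYUVByteSize(640, 480));  // 460800
    // Odd dimensions round the chroma plane up: 641x481 -> 308321 + 321*241*2 = 463043.
    System.out.println(getYUVByteSize(641, 481));  // 463043
  }
}
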
@@ -19,7 +19,10 @@ import android.util.Log;
 import java.util.HashSet;
 import java.util.Set;

-/** Wrapper for the platform log function, allows convenient message prefixing and log disabling. */
+/**
+ * Wrapper for the platform log function, allows convenient message prefixing
+ * and log disabling.
+ */
 public final class Logger {
   private static final String DEFAULT_TAG = "tensorflow";
   private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG;
@@ -39,33 +42,18 @@ public final class Logger {
   private int minLogLevel = DEFAULT_MIN_LOG_LEVEL;

   /**
-   * Creates a Logger using the class name as the message prefix.
-   *
-   * @param clazz the simple name of this class is used as the message prefix.
-   */
-  public Logger(final Class<?> clazz) {
-    this(clazz.getSimpleName());
-  }
-
-  /**
-   * Creates a Logger using the specified message prefix.
-   *
-   * @param messagePrefix is prepended to the text of every message.
-   */
-  public Logger(final String messagePrefix) {
-    this(DEFAULT_TAG, messagePrefix);
-  }
-
-  /**
-   * Creates a Logger with a custom tag and a custom message prefix. If the message prefix is set to
+   * Creates a Logger with a custom tag and a custom message prefix. If the
+   * message prefix is set to
    *
-   * <pre>null</pre>
+   * <pre>
+   * null
+   * </pre>
    *
    * , the caller's class name is used as the prefix.
    *
    * @param tag identifies the source of a log message.
-   * @param messagePrefix prepended to every message if non-null. If null, the name of the caller is
-   *     being used
+   * @param messagePrefix prepended to every message if non-null. If null, the
+   * name of the caller is being used
    */
   public Logger(final String tag, final String messagePrefix) {
     this.tag = tag;
@@ -78,21 +66,17 @@ public final class Logger {
     this(DEFAULT_TAG, null);
   }

-  /** Creates a Logger using the caller's class name as the message prefix. */
-  public Logger(final int minLogLevel) {
-    this(DEFAULT_TAG, null);
-    this.minLogLevel = minLogLevel;
-  }
-
   /**
    * Return caller's simple name.
    *
-   * <p>Android getStackTrace() returns an array that looks like this: stackTrace[0]:
+   * <p>
+   * Android getStackTrace() returns an array that looks like this: stackTrace[0]:
    * dalvik.system.VMStack stackTrace[1]: java.lang.Thread stackTrace[2]:
    * com.google.android.apps.unveil.env.UnveilLogger stackTrace[3]:
    * com.google.android.apps.unveil.BaseApplication
    *
-   * <p>This function returns the simple version of the first non-filtered name.
+   * <p>
+   * This function returns the simple version of the first non-filtered name.
    *
    * @return caller's simple name
    */
@@ -103,7 +87,8 @@ public final class Logger {
     for (final StackTraceElement elem : stackTrace) {
       final String className = elem.getClassName();
       if (!IGNORED_CLASS_NAMES.contains(className)) {
-        // We're only interested in the simple name of the class, not the complete package.
+        // We're only interested in the simple name of the class, not the complete
+        // package.
         final String[] classParts = className.split("\\.");
         return classParts[classParts.length - 1];
       }
@@ -112,10 +97,6 @@ public final class Logger {
     return Logger.class.getSimpleName();
   }

-  public void setMinLogLevel(final int minLogLevel) {
-    this.minLogLevel = minLogLevel;
-  }
-
   public boolean isLoggable(final int logLevel) {
     return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel);
   }
@@ -130,24 +111,12 @@ public final class Logger {
     }
   }

-  public void v(final Throwable t, final String format, final Object... args) {
-    if (isLoggable(Log.VERBOSE)) {
-      Log.v(tag, toMessage(format, args), t);
-    }
-  }
-
   public void d(final String format, final Object... args) {
     if (isLoggable(Log.DEBUG)) {
       Log.d(tag, toMessage(format, args));
     }
   }

-  public void d(final Throwable t, final String format, final Object... args) {
-    if (isLoggable(Log.DEBUG)) {
-      Log.d(tag, toMessage(format, args), t);
-    }
-  }
-
   public void i(final String format, final Object... args) {
     if (isLoggable(Log.INFO)) {
       Log.i(tag, toMessage(format, args));
@@ -166,12 +135,6 @@ public final class Logger {
     }
   }

-  public void w(final Throwable t, final String format, final Object... args) {
-    if (isLoggable(Log.WARN)) {
-      Log.w(tag, toMessage(format, args), t);
-    }
-  }
-
   public void e(final String format, final Object... args) {
     if (isLoggable(Log.ERROR)) {
       Log.e(tag, toMessage(format, args));
...
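
For reference, a minimal usage sketch of the Logger API that survives this diff: the no-argument and (tag, messagePrefix) constructors plus the printf-style i/d/e methods already called from MainActivity and ImageUtils. LoggerDemo, the tag "MyFeature", and the literal messages are made up.

import com.agenew.detection.env.Logger;

public class LoggerDemo {
  private static final Logger LOGGER = new Logger();                   // default "tensorflow" tag, no prefix
  private static final Logger TAGGED = new Logger("MyFeature", null);  // custom tag, caller name as prefix

  static void demo() {
    LOGGER.i("Saving %dx%d bitmap to %s.", 640, 480, "/sdcard/tensorflow");
    TAGGED.d("threads=%d, device=%s", 4, "GPU");
    try {
      throw new IllegalStateException("boom");
    } catch (IllegalStateException e) {
      LOGGER.e(e, "Exception initializing classifier!");  // Throwable overload, as used in MainActivity
    }
  }
}
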
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package com.agenew.detection.env;
import android.graphics.Bitmap;
import android.text.TextUtils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/** Size class independent of a Camera object. */
public class Size implements Comparable<Size>, Serializable {
// 1.4 went out with this UID so we'll need to maintain it to preserve pending queries when
// upgrading.
public static final long serialVersionUID = 7689808733290872361L;
public final int width;
public final int height;
public Size(final int width, final int height) {
this.width = width;
this.height = height;
}
public Size(final Bitmap bmp) {
this.width = bmp.getWidth();
this.height = bmp.getHeight();
}
/**
* Rotate a size by the given number of degrees.
*
* @param size Size to rotate.
* @param rotation Degrees {0, 90, 180, 270} to rotate the size.
* @return Rotated size.
*/
public static Size getRotatedSize(final Size size, final int rotation) {
if (rotation % 180 != 0) {
// The phone is portrait, therefore the camera is sideways and frame should be rotated.
return new Size(size.height, size.width);
}
return size;
}
public static Size parseFromString(String sizeString) {
if (TextUtils.isEmpty(sizeString)) {
return null;
}
sizeString = sizeString.trim();
// The expected format is "<width>x<height>".
final String[] components = sizeString.split("x");
if (components.length == 2) {
try {
final int width = Integer.parseInt(components[0]);
final int height = Integer.parseInt(components[1]);
return new Size(width, height);
} catch (final NumberFormatException e) {
return null;
}
} else {
return null;
}
}
public static List<Size> sizeStringToList(final String sizes) {
final List<Size> sizeList = new ArrayList<Size>();
if (sizes != null) {
final String[] pairs = sizes.split(",");
for (final String pair : pairs) {
final Size size = Size.parseFromString(pair);
if (size != null) {
sizeList.add(size);
}
}
}
return sizeList;
}
public static String sizeListToString(final List<Size> sizes) {
String sizesString = "";
if (sizes != null && sizes.size() > 0) {
sizesString = sizes.get(0).toString();
for (int i = 1; i < sizes.size(); i++) {
sizesString += "," + sizes.get(i).toString();
}
}
return sizesString;
}
public static final String dimensionsAsString(final int width, final int height) {
return width + "x" + height;
}
public final float aspectRatio() {
return (float) width / (float) height;
}
@Override
public int compareTo(final Size other) {
return width * height - other.width * other.height;
}
@Override
public boolean equals(final Object other) {
if (other == null) {
return false;
}
if (!(other instanceof Size)) {
return false;
}
final Size otherSize = (Size) other;
return (width == otherSize.width && height == otherSize.height);
}
@Override
public int hashCode() {
return width * 32713 + height;
}
@Override
public String toString() {
return dimensionsAsString(width, height);
}
}
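
Quick usage of the Size helpers listed above (package com.agenew.detection.env; this class appears to be among the files removed by this commit). The expected string format is "<width>x<height>", comma-separated for lists; note parseFromString relies on android.text.TextUtils, so this runs on Android rather than a bare JVM. SizeDemo and the sample strings are illustrative.

import com.agenew.detection.env.Size;
import java.util.List;

public class SizeDemo {
  public static void demo() {
    Size s = Size.parseFromString("640x480");
    System.out.println(s);                // "640x480"
    System.out.println(s.aspectRatio());  // 1.3333334

    List<Size> list = Size.sizeStringToList("320x240,640x480,bad,1280x720");
    System.out.println(list.size());                  // 3 -- the malformed entry is dropped
    System.out.println(Size.sizeListToString(list));  // "320x240,640x480,1280x720"
  }
}
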
 package com.agenew.detection.env;

-import android.content.Context;
 import android.content.res.AssetFileDescriptor;
 import android.content.res.AssetManager;
-import android.graphics.Bitmap;
-import android.graphics.BitmapFactory;
-import android.graphics.Canvas;
-import android.graphics.Matrix;
-import android.os.Environment;
 import android.util.Log;

-import java.io.File;
 import java.io.FileInputStream;
-import java.io.FileOutputStream;
 import java.io.IOException;
-import java.io.InputStream;
 import java.nio.MappedByteBuffer;
 import java.nio.channels.FileChannel;
@@ -63,149 +54,4 @@ public class Utils {
    return result;
  }
public static void softmax(final float[] vals) {
float max = Float.NEGATIVE_INFINITY;
for (final float val : vals) {
max = Math.max(max, val);
}
float sum = 0.0f;
for (int i = 0; i < vals.length; ++i) {
vals[i] = (float) Math.exp(vals[i] - max);
sum += vals[i];
}
for (int i = 0; i < vals.length; ++i) {
vals[i] = vals[i] / sum;
}
}
public static float expit(final float x) {
return (float) (1. / (1. + Math.exp(-x)));
}
// public static Bitmap scale(Context context, String filePath) {
// AssetManager assetManager = context.getAssets();
//
// InputStream istr;
// Bitmap bitmap = null;
// try {
// istr = assetManager.open(filePath);
// bitmap = BitmapFactory.decodeStream(istr);
// bitmap = Bitmap.createScaledBitmap(bitmap, MainActivity.TF_OD_API_INPUT_SIZE, MainActivity.TF_OD_API_INPUT_SIZE, false);
// } catch (IOException e) {
// // handle exception
// Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
// }
//
// return bitmap;
// }
public static Bitmap getBitmapFromAsset(Context context, String filePath) {
AssetManager assetManager = context.getAssets();
InputStream istr;
Bitmap bitmap = null;
try {
istr = assetManager.open(filePath);
bitmap = BitmapFactory.decodeStream(istr);
// return bitmap.copy(Bitmap.Config.ARGB_8888,true);
} catch (IOException e) {
// handle exception
Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
}
return bitmap;
}
/**
* Returns a transformation matrix from one reference frame into another.
* Handles cropping (if maintaining aspect ratio is desired) and rotation.
*
* @param srcWidth Width of source frame.
* @param srcHeight Height of source frame.
* @param dstWidth Width of destination frame.
* @param dstHeight Height of destination frame.
* @param applyRotation Amount of rotation to apply from one frame to
* another. Must be a multiple of 90.
* @param maintainAspectRatio If true, will ensure that scaling in x and y
* remains constant, cropping the image if necessary.
* @return The transformation fulfilling the desired requirements.
*/
public static Matrix getTransformationMatrix(final int srcWidth, final int srcHeight, final int dstWidth,
final int dstHeight, final int applyRotation, final boolean maintainAspectRatio) {
final Matrix matrix = new Matrix();
if (applyRotation != 0) {
// Translate so center of image is at origin.
matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
// Rotate around origin.
matrix.postRotate(applyRotation);
}
// Account for the already applied rotation, if any, and then determine how
// much scaling is needed for each axis.
final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
final int inWidth = transpose ? srcHeight : srcWidth;
final int inHeight = transpose ? srcWidth : srcHeight;
// Apply scaling if necessary.
if (inWidth != dstWidth || inHeight != dstHeight) {
final float scaleFactorX = dstWidth / (float) inWidth;
final float scaleFactorY = dstHeight / (float) inHeight;
if (maintainAspectRatio) {
// Scale by minimum factor so that dst is filled completely while
// maintaining the aspect ratio. Some image may fall off the edge.
final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
matrix.postScale(scaleFactor, scaleFactor);
} else {
// Scale exactly to fill dst from src.
matrix.postScale(scaleFactorX, scaleFactorY);
}
}
if (applyRotation != 0) {
// Translate back from origin centered reference to destination frame.
matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
}
return matrix;
}
public static Bitmap processBitmap(Bitmap source, int size) {
int image_height = source.getHeight();
int image_width = source.getWidth();
Bitmap croppedBitmap = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888);
Matrix frameToCropTransformations = getTransformationMatrix(image_width, image_height, size, size, 0, false);
Matrix cropToFrameTransformations = new Matrix();
frameToCropTransformations.invert(cropToFrameTransformations);
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(source, frameToCropTransformations, null);
return croppedBitmap;
}
public static void writeToFile(String data, Context context) {
try {
String baseDir = Environment.getExternalStorageDirectory().getAbsolutePath();
String fileName = "myFile.txt";
File file = new File(baseDir + File.separator + fileName);
FileOutputStream stream = new FileOutputStream(file);
try {
stream.write(data.getBytes());
} finally {
stream.close();
}
} catch (IOException e) {
Log.e("Exception", "File write failed: " + e.toString());
}
}
}
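
A small, self-contained numeric check of the softmax/expit helpers shown above (they are removed from Utils by this diff); the logic is copied into plain Java so the outputs can be verified in isolation.

public final class SoftmaxCheck {
  static void softmax(final float[] vals) {
    float max = Float.NEGATIVE_INFINITY;
    for (final float v : vals) max = Math.max(max, v);  // subtract the max for numerical stability
    float sum = 0f;
    for (int i = 0; i < vals.length; ++i) {
      vals[i] = (float) Math.exp(vals[i] - max);
      sum += vals[i];
    }
    for (int i = 0; i < vals.length; ++i) vals[i] /= sum;
  }

  static float expit(final float x) {
    return (float) (1. / (1. + Math.exp(-x)));
  }

  public static void main(String[] args) {
    float[] logits = {1f, 2f, 3f};
    softmax(logits);
    // Approximately [0.0900, 0.2447, 0.6652]; the three entries sum to 1.
    System.out.println(java.util.Arrays.toString(logits));
    System.out.println(expit(0f));  // 0.5
  }
}
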
@@ -34,8 +34,6 @@ public interface Classifier {
   void setNumThreads(int num_threads);

-  void setUseNNAPI(boolean isChecked);
-
   abstract float getObjThresh();

   /**
...
@@ -5,30 +5,19 @@ import android.content.res.AssetManager;
 import java.io.IOException;

 public class DetectorFactory {
-    public static YoloV5Classifier getDetector(
-            final AssetManager assetManager,
-            final String modelFilename)
+    public static YoloV5Classifier getDetector(final AssetManager assetManager, final String modelFilename)
             throws IOException {
         String labelFilename = null;
         boolean isQuantized = false;
         int inputSize = 0;
-        int[] output_width = new int[]{0};
-        int[][] masks = new int[][]{{0}};
-        int[] anchors = new int[]{0};

         if (modelFilename.endsWith(".tflite")) {
             labelFilename = "file:///android_asset/class.txt";
             isQuantized = modelFilename.endsWith("-int8.tflite");
             inputSize = 640;
-            output_width = new int[]{80, 40, 20};
-            masks = new int[][]{{0, 1, 2}, {3, 4, 5}, {6, 7, 8}};
-            anchors = new int[]{
-                10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
-            };
         }

-        return YoloV5Classifier.create(assetManager, modelFilename, labelFilename, isQuantized,
-                inputSize);
+        return YoloV5Classifier.create(assetManager, modelFilename, labelFilename, isQuantized, inputSize);
     }
 }
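
Usage sketch for the simplified two-argument getDetector kept by this diff. The package of DetectorFactory and the asset name "yolov5s-int8.tflite" are assumptions for illustration; only the "-int8.tflite" quantization convention and the 640 input size come from the code above.

import android.content.res.AssetManager;
import java.io.IOException;
import com.agenew.detection.tflite.DetectorFactory;    // package assumed from the surrounding imports
import com.agenew.detection.tflite.YoloV5Classifier;

public class DetectorSetup {
  public static YoloV5Classifier load(AssetManager assets) {
    try {
      // A "-int8.tflite" suffix switches the factory to the quantized code path.
      return DetectorFactory.getDetector(assets, "yolov5s-int8.tflite");
    } catch (IOException e) {
      throw new RuntimeException("Could not load detector", e);
    }
  }
}
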
@@ -35,27 +35,17 @@ import com.agenew.detection.env.ImageUtils;
 import com.agenew.detection.env.Logger;
 import com.agenew.detection.tflite.Classifier.Recognition;

-/** A tracker that handles non-max suppression and matches existing objects to new detections. */
+/**
+ * A tracker that handles non-max suppression and matches existing objects to
+ * new detections.
+ */
 public class MultiBoxTracker {
   private static final float TEXT_SIZE_DIP = 18;
   private static final float MIN_SIZE = 16.0f;
-  private static final int[] COLORS = {
-    Color.BLUE,
-    Color.RED,
-    Color.GREEN,
-    Color.YELLOW,
-    Color.CYAN,
-    Color.MAGENTA,
-    Color.WHITE,
-    Color.parseColor("#55FF55"),
-    Color.parseColor("#FFA500"),
-    Color.parseColor("#FF8888"),
-    Color.parseColor("#AAAAFF"),
-    Color.parseColor("#FFFFAA"),
-    Color.parseColor("#55AAAA"),
-    Color.parseColor("#AA33AA"),
-    Color.parseColor("#0D0068")
-  };
+  private static final int[] COLORS = { Color.BLUE, Color.RED, Color.GREEN, Color.YELLOW, Color.CYAN, Color.MAGENTA,
+      Color.WHITE, Color.parseColor("#55FF55"), Color.parseColor("#FFA500"), Color.parseColor("#FF8888"),
+      Color.parseColor("#AAAAFF"), Color.parseColor("#FFFFAA"), Color.parseColor("#55AAAA"),
+      Color.parseColor("#AA33AA"), Color.parseColor("#0D0068") };

   final List<Pair<Float, RectF>> screenRects = new LinkedList<Pair<Float, RectF>>();
   private final Logger logger = new Logger();
   private final Queue<Integer> availableColors = new LinkedList<Integer>();
@@ -80,14 +70,12 @@ public class MultiBoxTracker {
     boxPaint.setStrokeJoin(Join.ROUND);
     boxPaint.setStrokeMiter(100);

-    textSizePx =
-        TypedValue.applyDimension(
-            TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, context.getResources().getDisplayMetrics());
+    textSizePx = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP,
+        context.getResources().getDisplayMetrics());
     borderedText = new BorderedText(textSizePx);
   }

-  public synchronized void setFrameConfiguration(
-      final int width, final int height, final int sensorOrientation) {
+  public synchronized void setFrameConfiguration(final int width, final int height, final int sensorOrientation) {
     frameWidth = width;
     frameHeight = height;
     this.sensorOrientation = sensorOrientation;
@@ -122,18 +110,11 @@ public class MultiBoxTracker {
   public synchronized void draw(final Canvas canvas) {
     final boolean rotated = sensorOrientation % 180 == 90;
-    final float multiplier =
-        Math.min(
-            canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
-            canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth));
-    frameToCanvasMatrix =
-        ImageUtils.getTransformationMatrix(
-            frameWidth,
-            frameHeight,
-            (int) (multiplier * (rotated ? frameHeight : frameWidth)),
-            (int) (multiplier * (rotated ? frameWidth : frameHeight)),
-            sensorOrientation,
-            false);
+    final float multiplier = Math.min(canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
+        canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth));
+    frameToCanvasMatrix = ImageUtils.getTransformationMatrix(frameWidth, frameHeight,
+        (int) (multiplier * (rotated ? frameHeight : frameWidth)),
+        (int) (multiplier * (rotated ? frameWidth : frameHeight)), sensorOrientation, false);
     for (final TrackedRecognition recognition : trackedObjects) {
       final RectF trackedPos = new RectF(recognition.location);
@@ -143,14 +124,10 @@ public class MultiBoxTracker {
       float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f;
       canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint);

-      final String labelString =
-          !TextUtils.isEmpty(recognition.title)
-              ? String.format("%s %.2f", recognition.title, (100 * recognition.detectionConfidence))
-              : String.format("%.2f", (100 * recognition.detectionConfidence));
-      // borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top,
-      // labelString);
-      borderedText.drawText(
-          canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint);
+      final String labelString = !TextUtils.isEmpty(recognition.title)
+          ? String.format("%s %.2f", recognition.title, (100 * recognition.detectionConfidence))
+          : String.format("%.2f", (100 * recognition.detectionConfidence));
+      borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint);
     }
   }
@@ -169,8 +146,7 @@ public class MultiBoxTracker {
       final RectF detectionScreenRect = new RectF();
       rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect);

-      logger.v(
-          "Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);
+      logger.v("Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);

       screenRects.add(new Pair<Float, RectF>(result.getConfidence(), detectionScreenRect));
@@ -193,13 +169,8 @@ public class MultiBoxTracker {
       trackedRecognition.detectionConfidence = potential.first;
       trackedRecognition.location = new RectF(potential.second.getLocation());
       trackedRecognition.title = potential.second.getTitle();
-      // trackedRecognition.color = COLORS[trackedObjects.size() % COLORS.length];
       trackedRecognition.color = COLORS[potential.second.getDetectedClass() % COLORS.length];
       trackedObjects.add(trackedRecognition);
-
-      // if (trackedObjects.size() >= COLORS.length) {
-      //   break;
-      // }
     }
   }
...
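
A worked example, with made-up numbers, of the letterboxing math in MultiBoxTracker.draw() above: a 640x480 camera frame drawn on a 1080x1920 portrait canvas with sensorOrientation = 90, so the frame is rotated relative to the canvas.

public final class TrackerScaleExample {
  public static void main(String[] args) {
    int frameWidth = 640, frameHeight = 480;
    int canvasWidth = 1080, canvasHeight = 1920;
    int sensorOrientation = 90;

    boolean rotated = sensorOrientation % 180 == 90;  // true here
    float multiplier = Math.min(
        canvasHeight / (float) (rotated ? frameWidth : frameHeight),  // 1920 / 640 = 3.0
        canvasWidth / (float) (rotated ? frameHeight : frameWidth));  // 1080 / 480 = 2.25

    int dstWidth = (int) (multiplier * (rotated ? frameHeight : frameWidth));   // 1080
    int dstHeight = (int) (multiplier * (rotated ? frameWidth : frameHeight));  // 1440

    // The frame fills the canvas width and leaves vertical padding above/below.
    System.out.println(multiplier + " -> " + dstWidth + "x" + dstHeight);  // 2.25 -> 1080x1440
  }
}
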
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillType="evenOdd"
android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
android:strokeColor="#00000000"
android:strokeWidth="1">
<aapt:attr name="android:fillColor">
<gradient
android:endX="78.5885"
android:endY="90.9159"
android:startX="48.7653"
android:startY="61.0927"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0"/>
<item
android:color="#00000000"
android:offset="1.0"/>
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
android:strokeColor="#00000000"
android:strokeWidth="1"/>
</vector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillColor="#26A69A"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@android:color/white"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@android:color/white"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<resources>
<!-- Base application theme. -->
<style name="AppTheme.ObjectDetection" parent="Theme.AppCompat.Light.NoActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/tfe_color_primary</item>
<item name="colorPrimaryDark">@color/tfe_color_primary_dark</item>
<item name="colorAccent">@color/tfe_color_accent</item>
</style>
</resources>