Commit 2756155a by wanglei

format

1 parent e08d779b
Showing with 2335 additions and 3165 deletions
...@@ -14,11 +14,9 @@ ...@@ -14,11 +14,9 @@
<uses-permission android:name="android.permission.INTERNET"/> <uses-permission android:name="android.permission.INTERNET"/>
<application <application
android:allowBackup="true" android:allowBackup="true"
android:icon="@mipmap/ic_launcher" android:icon="@drawable/ic_launcher"
android:label="@string/tfe_od_app_name" android:label="@string/tfe_od_app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true" android:supportsRtl="true"
android:theme="@style/AppTheme.ObjectDetection"
android:hardwareAccelerated="true" android:hardwareAccelerated="true"
android:installLocation="internalOnly"> android:installLocation="internalOnly">
......
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
package com.agenew.detection; package com.agenew.detection;
import android.Manifest; import android.Manifest;
import android.app.Activity;
import android.app.Fragment; import android.app.Fragment;
import android.content.Context; import android.content.Context;
import android.content.pm.PackageManager; import android.content.pm.PackageManager;
...@@ -36,17 +37,12 @@ import android.os.Handler; ...@@ -36,17 +37,12 @@ import android.os.Handler;
import android.os.HandlerThread; import android.os.HandlerThread;
import android.os.Trace; import android.os.Trace;
import androidx.annotation.NonNull; import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
//import androidx.appcompat.widget.Toolbar;
import android.util.Size; import android.util.Size;
import android.view.Surface; import android.view.Surface;
import android.view.View; import android.view.View;
import android.view.ViewTreeObserver; import android.view.ViewTreeObserver;
import android.view.WindowManager; import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter; import android.widget.ArrayAdapter;
import android.widget.CompoundButton;
import android.widget.ImageView; import android.widget.ImageView;
import android.widget.LinearLayout; import android.widget.LinearLayout;
import android.widget.ListView; import android.widget.ListView;
...@@ -61,564 +57,486 @@ import java.util.ArrayList; ...@@ -61,564 +57,486 @@ import java.util.ArrayList;
import com.agenew.detection.env.ImageUtils; import com.agenew.detection.env.ImageUtils;
import com.agenew.detection.env.Logger; import com.agenew.detection.env.Logger;
public abstract class CameraActivity extends AppCompatActivity public abstract class CameraActivity extends Activity
implements OnImageAvailableListener, implements OnImageAvailableListener, Camera.PreviewCallback, View.OnClickListener {
Camera.PreviewCallback, private static final Logger LOGGER = new Logger();
// CompoundButton.OnCheckedChangeListener,
View.OnClickListener { private static final int PERMISSIONS_REQUEST = 1;
private static final Logger LOGGER = new Logger();
private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;
private static final int PERMISSIONS_REQUEST = 1; private static final String ASSET_PATH = "";
protected int previewWidth = 0;
private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA; protected int previewHeight = 0;
private static final String ASSET_PATH = ""; private boolean debug = false;
protected int previewWidth = 0; protected Handler handler;
protected int previewHeight = 0; private HandlerThread handlerThread;
private boolean debug = false; private boolean useCamera2API;
protected Handler handler; private boolean isProcessingFrame = false;
private HandlerThread handlerThread; private byte[][] yuvBytes = new byte[3][];
private boolean useCamera2API; private int[] rgbBytes = null;
private boolean isProcessingFrame = false; private int yRowStride;
private byte[][] yuvBytes = new byte[3][]; protected int defaultModelIndex = 0;
private int[] rgbBytes = null; protected int defaultDeviceIndex = 2;
private int yRowStride; private Runnable postInferenceCallback;
protected int defaultModelIndex = 0; private Runnable imageConverter;
protected int defaultDeviceIndex = 2; protected ArrayList<String> modelStrings = new ArrayList<>();
private Runnable postInferenceCallback;
private Runnable imageConverter; private LinearLayout bottomSheetLayout;
protected ArrayList<String> modelStrings = new ArrayList<String>(); private LinearLayout gestureLayout;
private BottomSheetBehavior<LinearLayout> sheetBehavior;
private LinearLayout bottomSheetLayout;
private LinearLayout gestureLayout; protected TextView frameValueTextView, cropValueTextView, inferenceTimeTextView;
private BottomSheetBehavior<LinearLayout> sheetBehavior; protected ImageView bottomSheetArrowImageView;
private ImageView plusImageView, minusImageView;
protected TextView frameValueTextView, cropValueTextView, inferenceTimeTextView; protected ListView deviceView;
protected ImageView bottomSheetArrowImageView; protected TextView threadsTextView;
private ImageView plusImageView, minusImageView; protected ListView modelView;
protected ListView deviceView; /** Current indices of device and model. */
protected TextView threadsTextView; int currentDevice = -1;
protected ListView modelView; int currentModel = -1;
/** Current indices of device and model. */ int currentNumThreads = -1;
int currentDevice = -1;
int currentModel = -1; ArrayList<String> deviceStrings = new ArrayList<>();
int currentNumThreads = -1;
@Override
ArrayList<String> deviceStrings = new ArrayList<String>(); protected void onCreate(final Bundle savedInstanceState) {
LOGGER.d("onCreate " + this);
@Override super.onCreate(null);
protected void onCreate(final Bundle savedInstanceState) { getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
LOGGER.d("onCreate " + this);
super.onCreate(null); setContentView(R.layout.tfe_od_activity_camera);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
if (hasPermission()) {
setContentView(R.layout.tfe_od_activity_camera); setFragment();
// Toolbar toolbar = findViewById(R.id.toolbar); } else {
// setSupportActionBar(toolbar); requestPermission();
// getSupportActionBar().setDisplayShowTitleEnabled(false); }
if (hasPermission()) { threadsTextView = findViewById(R.id.threads);
setFragment(); currentNumThreads = Integer.parseInt(threadsTextView.getText().toString().trim());
} else { plusImageView = findViewById(R.id.plus);
requestPermission(); minusImageView = findViewById(R.id.minus);
} deviceView = findViewById(R.id.device_list);
deviceStrings.add("CPU");
threadsTextView = findViewById(R.id.threads); deviceStrings.add("GPU");
currentNumThreads = Integer.parseInt(threadsTextView.getText().toString().trim()); deviceStrings.add("NNAPI");
plusImageView = findViewById(R.id.plus); deviceView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
minusImageView = findViewById(R.id.minus); ArrayAdapter<String> deviceAdapter = new ArrayAdapter<>(this, R.layout.deviceview_row,
deviceView = findViewById(R.id.device_list); R.id.deviceview_row_text, deviceStrings);
deviceStrings.add("CPU"); deviceView.setAdapter(deviceAdapter);
deviceStrings.add("GPU"); deviceView.setItemChecked(defaultDeviceIndex, true);
deviceStrings.add("NNAPI"); currentDevice = defaultDeviceIndex;
deviceView.setChoiceMode(ListView.CHOICE_MODE_SINGLE); deviceView.setOnItemClickListener((parent, view, position, id) -> updateActiveModel());
ArrayAdapter<String> deviceAdapter =
new ArrayAdapter<>( bottomSheetLayout = findViewById(R.id.bottom_sheet_layout);
CameraActivity.this , R.layout.deviceview_row, R.id.deviceview_row_text, deviceStrings); gestureLayout = findViewById(R.id.gesture_layout);
deviceView.setAdapter(deviceAdapter); sheetBehavior = BottomSheetBehavior.from(bottomSheetLayout);
deviceView.setItemChecked(defaultDeviceIndex, true); bottomSheetArrowImageView = findViewById(R.id.bottom_sheet_arrow);
currentDevice = defaultDeviceIndex; modelView = findViewById((R.id.model_list));
deviceView.setOnItemClickListener(
new AdapterView.OnItemClickListener() { modelStrings = getModelStrings(getAssets(), ASSET_PATH);
@Override modelView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
public void onItemClick(AdapterView<?> parent, View view, int position, long id) { ArrayAdapter<String> modelAdapter = new ArrayAdapter<>(this, R.layout.listview_row,
updateActiveModel(); R.id.listview_row_text, modelStrings);
} modelView.setAdapter(modelAdapter);
}); modelView.setItemChecked(defaultModelIndex, true);
currentModel = defaultModelIndex;
bottomSheetLayout = findViewById(R.id.bottom_sheet_layout); modelView.setOnItemClickListener((parent, view, position, id) -> updateActiveModel());
gestureLayout = findViewById(R.id.gesture_layout);
sheetBehavior = BottomSheetBehavior.from(bottomSheetLayout); ViewTreeObserver vto = gestureLayout.getViewTreeObserver();
bottomSheetArrowImageView = findViewById(R.id.bottom_sheet_arrow); vto.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
modelView = findViewById((R.id.model_list)); @Override
public void onGlobalLayout() {
modelStrings = getModelStrings(getAssets(), ASSET_PATH); if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
modelView.setChoiceMode(ListView.CHOICE_MODE_SINGLE); gestureLayout.getViewTreeObserver().removeGlobalOnLayoutListener(this);
ArrayAdapter<String> modelAdapter = } else {
new ArrayAdapter<>( gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this);
CameraActivity.this , R.layout.listview_row, R.id.listview_row_text, modelStrings); }
modelView.setAdapter(modelAdapter); int height = gestureLayout.getMeasuredHeight();
modelView.setItemChecked(defaultModelIndex, true);
currentModel = defaultModelIndex; sheetBehavior.setPeekHeight(height);
modelView.setOnItemClickListener( }
new AdapterView.OnItemClickListener() { });
@Override sheetBehavior.setHideable(false);
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
updateActiveModel(); sheetBehavior.setBottomSheetCallback(new BottomSheetBehavior.BottomSheetCallback() {
} @Override
}); public void onStateChanged(@NonNull View bottomSheet, int newState) {
switch (newState) {
ViewTreeObserver vto = gestureLayout.getViewTreeObserver(); case BottomSheetBehavior.STATE_HIDDEN:
vto.addOnGlobalLayoutListener( break;
new ViewTreeObserver.OnGlobalLayoutListener() { case BottomSheetBehavior.STATE_EXPANDED: {
@Override bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_down);
public void onGlobalLayout() { }
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) { break;
gestureLayout.getViewTreeObserver().removeGlobalOnLayoutListener(this); case BottomSheetBehavior.STATE_COLLAPSED: {
} else { bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this); }
} break;
// int width = bottomSheetLayout.getMeasuredWidth(); case BottomSheetBehavior.STATE_DRAGGING:
int height = gestureLayout.getMeasuredHeight(); break;
case BottomSheetBehavior.STATE_SETTLING:
sheetBehavior.setPeekHeight(height); bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
} break;
}); }
sheetBehavior.setHideable(false); }
sheetBehavior.setBottomSheetCallback( @Override
new BottomSheetBehavior.BottomSheetCallback() { public void onSlide(@NonNull View bottomSheet, float slideOffset) {
@Override }
public void onStateChanged(@NonNull View bottomSheet, int newState) { });
switch (newState) {
case BottomSheetBehavior.STATE_HIDDEN: frameValueTextView = findViewById(R.id.frame_info);
break; cropValueTextView = findViewById(R.id.crop_info);
case BottomSheetBehavior.STATE_EXPANDED: inferenceTimeTextView = findViewById(R.id.inference_info);
{
bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_down); plusImageView.setOnClickListener(this);
} minusImageView.setOnClickListener(this);
break; }
case BottomSheetBehavior.STATE_COLLAPSED:
{ protected ArrayList<String> getModelStrings(AssetManager mgr, String path) {
bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up); ArrayList<String> res = new ArrayList<>();
} try {
break; String[] files = mgr.list(path);
case BottomSheetBehavior.STATE_DRAGGING: for (String file : files) {
break; String[] splits = file.split("\\.");
case BottomSheetBehavior.STATE_SETTLING: if (splits[splits.length - 1].equals("tflite")) {
bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up); res.add(file);
break; }
} }
}
} catch (IOException e) {
@Override System.err.println("getModelStrings: " + e.getMessage());
public void onSlide(@NonNull View bottomSheet, float slideOffset) {} }
}); return res;
}
frameValueTextView = findViewById(R.id.frame_info);
cropValueTextView = findViewById(R.id.crop_info); protected int[] getRgbBytes() {
inferenceTimeTextView = findViewById(R.id.inference_info); imageConverter.run();
return rgbBytes;
plusImageView.setOnClickListener(this); }
minusImageView.setOnClickListener(this);
} /** Callback for android.hardware.Camera API */
@Override
public void onPreviewFrame(final byte[] bytes, final Camera camera) {
if (isProcessingFrame) {
protected ArrayList<String> getModelStrings(AssetManager mgr, String path){ LOGGER.w("Dropping frame!");
ArrayList<String> res = new ArrayList<String>(); return;
try { }
String[] files = mgr.list(path);
for (String file : files) { try {
String[] splits = file.split("\\."); // Initialize the storage bitmaps once when the resolution is known.
if (splits[splits.length - 1].equals("tflite")) { if (rgbBytes == null) {
res.add(file); Camera.Size previewSize = camera.getParameters().getPreviewSize();
} previewHeight = previewSize.height;
} previewWidth = previewSize.width;
rgbBytes = new int[previewWidth * previewHeight];
} onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90);
catch (IOException e){ }
System.err.println("getModelStrings: " + e.getMessage()); } catch (final Exception e) {
} LOGGER.e(e, "Exception!");
return res; return;
} }
protected int[] getRgbBytes() { isProcessingFrame = true;
imageConverter.run(); yuvBytes[0] = bytes;
return rgbBytes; yRowStride = previewWidth;
}
imageConverter = () -> ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
protected int getLuminanceStride() {
return yRowStride; postInferenceCallback = new Runnable() {
} @Override
public void run() {
protected byte[] getLuminance() { camera.addCallbackBuffer(bytes);
return yuvBytes[0]; isProcessingFrame = false;
} }
};
/** Callback for android.hardware.Camera API */ processImage();
@Override }
public void onPreviewFrame(final byte[] bytes, final Camera camera) {
if (isProcessingFrame) { /** Callback for Camera2 API */
LOGGER.w("Dropping frame!"); @Override
return; public void onImageAvailable(final ImageReader reader) {
} // We need wait until we have some size from onPreviewSizeChosen
if (previewWidth == 0 || previewHeight == 0) {
try { return;
// Initialize the storage bitmaps once when the resolution is known. }
if (rgbBytes == null) { if (rgbBytes == null) {
Camera.Size previewSize = camera.getParameters().getPreviewSize(); rgbBytes = new int[previewWidth * previewHeight];
previewHeight = previewSize.height; }
previewWidth = previewSize.width; try {
rgbBytes = new int[previewWidth * previewHeight]; final Image image = reader.acquireLatestImage();
onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90);
} if (image == null) {
} catch (final Exception e) { return;
LOGGER.e(e, "Exception!"); }
return;
} if (isProcessingFrame) {
image.close();
isProcessingFrame = true; return;
yuvBytes[0] = bytes; }
yRowStride = previewWidth; isProcessingFrame = true;
Trace.beginSection("imageAvailable");
imageConverter = final Plane[] planes = image.getPlanes();
new Runnable() { fillBytes(planes, yuvBytes);
@Override yRowStride = planes[0].getRowStride();
public void run() { final int uvRowStride = planes[1].getRowStride();
ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes); final int uvPixelStride = planes[1].getPixelStride();
}
}; imageConverter = () -> ImageUtils.convertYUV420ToARGB8888(yuvBytes[0], yuvBytes[1], yuvBytes[2], previewWidth,
previewHeight, yRowStride, uvRowStride, uvPixelStride, rgbBytes);
postInferenceCallback =
new Runnable() { postInferenceCallback = new Runnable() {
@Override @Override
public void run() { public void run() {
camera.addCallbackBuffer(bytes); image.close();
isProcessingFrame = false; isProcessingFrame = false;
} }
}; };
processImage();
} processImage();
} catch (final Exception e) {
/** Callback for Camera2 API */ LOGGER.e(e, "Exception!");
@Override Trace.endSection();
public void onImageAvailable(final ImageReader reader) { return;
// We need wait until we have some size from onPreviewSizeChosen }
if (previewWidth == 0 || previewHeight == 0) { Trace.endSection();
return; }
}
if (rgbBytes == null) { @Override
rgbBytes = new int[previewWidth * previewHeight]; public synchronized void onStart() {
} LOGGER.d("onStart " + this);
try { super.onStart();
final Image image = reader.acquireLatestImage(); }
if (image == null) { @Override
return; public synchronized void onResume() {
} LOGGER.d("onResume " + this);
super.onResume();
if (isProcessingFrame) {
image.close(); handlerThread = new HandlerThread("inference");
return; handlerThread.start();
} handler = new Handler(handlerThread.getLooper());
isProcessingFrame = true; }
Trace.beginSection("imageAvailable");
final Plane[] planes = image.getPlanes(); @Override
fillBytes(planes, yuvBytes); public synchronized void onPause() {
yRowStride = planes[0].getRowStride(); LOGGER.d("onPause " + this);
final int uvRowStride = planes[1].getRowStride();
final int uvPixelStride = planes[1].getPixelStride(); handlerThread.quitSafely();
try {
imageConverter = handlerThread.join();
new Runnable() { handlerThread = null;
@Override handler = null;
public void run() { } catch (final InterruptedException e) {
ImageUtils.convertYUV420ToARGB8888( LOGGER.e(e, "Exception!");
yuvBytes[0], }
yuvBytes[1],
yuvBytes[2], super.onPause();
previewWidth, }
previewHeight,
yRowStride, @Override
uvRowStride, public synchronized void onStop() {
uvPixelStride, LOGGER.d("onStop " + this);
rgbBytes); super.onStop();
} }
};
@Override
postInferenceCallback = public synchronized void onDestroy() {
new Runnable() { LOGGER.d("onDestroy " + this);
@Override super.onDestroy();
public void run() { }
image.close();
isProcessingFrame = false; protected synchronized void runInBackground(final Runnable r) {
} if (handler != null) {
}; handler.post(r);
}
processImage(); }
} catch (final Exception e) {
LOGGER.e(e, "Exception!"); @Override
Trace.endSection(); public void onRequestPermissionsResult(final int requestCode, final String[] permissions,
return; final int[] grantResults) {
} super.onRequestPermissionsResult(requestCode, permissions, grantResults);
Trace.endSection(); if (requestCode == PERMISSIONS_REQUEST) {
} if (allPermissionsGranted(grantResults)) {
setFragment();
@Override } else {
public synchronized void onStart() { requestPermission();
LOGGER.d("onStart " + this); }
super.onStart(); }
} }
@Override private static boolean allPermissionsGranted(final int[] grantResults) {
public synchronized void onResume() { for (int result : grantResults) {
LOGGER.d("onResume " + this); if (result != PackageManager.PERMISSION_GRANTED) {
super.onResume(); return false;
}
handlerThread = new HandlerThread("inference"); }
handlerThread.start(); return true;
handler = new Handler(handlerThread.getLooper()); }
}
private boolean hasPermission() {
@Override if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
public synchronized void onPause() { return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED;
LOGGER.d("onPause " + this); } else {
return true;
handlerThread.quitSafely(); }
try { }
handlerThread.join();
handlerThread = null; private void requestPermission() {
handler = null; if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
} catch (final InterruptedException e) { if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) {
LOGGER.e(e, "Exception!"); Toast.makeText(CameraActivity.this, "Camera permission is required for this demo", Toast.LENGTH_LONG)
} .show();
}
super.onPause(); requestPermissions(new String[] { PERMISSION_CAMERA }, PERMISSIONS_REQUEST);
} }
}
@Override
public synchronized void onStop() { // Returns true if the device supports the required hardware level, or better.
LOGGER.d("onStop " + this); private boolean isHardwareLevelSupported(CameraCharacteristics characteristics, int requiredLevel) {
super.onStop(); int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
} if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
return requiredLevel == deviceLevel;
@Override }
public synchronized void onDestroy() { // deviceLevel is not LEGACY, can use numerical sort
LOGGER.d("onDestroy " + this); return requiredLevel <= deviceLevel;
super.onDestroy(); }
}
private String chooseCamera() {
protected synchronized void runInBackground(final Runnable r) { final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
if (handler != null) { try {
handler.post(r); for (final String cameraId : manager.getCameraIdList()) {
} final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
}
// We don't use a front facing camera in this sample.
@Override final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
public void onRequestPermissionsResult( if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
final int requestCode, final String[] permissions, final int[] grantResults) { continue;
super.onRequestPermissionsResult(requestCode, permissions, grantResults); }
if (requestCode == PERMISSIONS_REQUEST) {
if (allPermissionsGranted(grantResults)) { final StreamConfigurationMap map = characteristics
setFragment(); .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
} else {
requestPermission(); if (map == null) {
} continue;
} }
}
// Fallback to camera1 API for internal cameras that don't have full support.
private static boolean allPermissionsGranted(final int[] grantResults) { // This should help with legacy situations where using the camera2 API causes
for (int result : grantResults) { // distorted or otherwise broken previews.
if (result != PackageManager.PERMISSION_GRANTED) { useCamera2API = (facing == CameraCharacteristics.LENS_FACING_EXTERNAL) || isHardwareLevelSupported(
return false; characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
} LOGGER.i("Camera API lv2?: %s", useCamera2API);
} return cameraId;
return true; }
} } catch (CameraAccessException e) {
LOGGER.e(e, "Not allowed to access camera");
private boolean hasPermission() { }
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED; return null;
} else { }
return true;
} protected void setFragment() {
} String cameraId = chooseCamera();
private void requestPermission() { Fragment fragment;
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { if (useCamera2API) {
if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) { CameraConnectionFragment camera2Fragment = CameraConnectionFragment
Toast.makeText( .newInstance((size, rotation) -> {
CameraActivity.this, previewHeight = size.getHeight();
"Camera permission is required for this demo", previewWidth = size.getWidth();
Toast.LENGTH_LONG) CameraActivity.this.onPreviewSizeChosen(size, rotation);
.show(); }, this, getLayoutId(), getDesiredPreviewFrameSize());
}
requestPermissions(new String[] {PERMISSION_CAMERA}, PERMISSIONS_REQUEST); camera2Fragment.setCamera(cameraId);
} fragment = camera2Fragment;
} } else {
fragment = new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
// Returns true if the device supports the required hardware level, or better. }
private boolean isHardwareLevelSupported(
CameraCharacteristics characteristics, int requiredLevel) { getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit();
int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); }
if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
return requiredLevel == deviceLevel; protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
} // Because of the variable row stride it's not possible to know in
// deviceLevel is not LEGACY, can use numerical sort // advance the actual necessary dimensions of the yuv planes.
return requiredLevel <= deviceLevel; for (int i = 0; i < planes.length; ++i) {
} final ByteBuffer buffer = planes[i].getBuffer();
if (yuvBytes[i] == null) {
private String chooseCamera() { LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE); yuvBytes[i] = new byte[buffer.capacity()];
try { }
for (final String cameraId : manager.getCameraIdList()) { buffer.get(yuvBytes[i]);
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId); }
}
// We don't use a front facing camera in this sample.
final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING); public boolean isDebug() {
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) { return debug;
continue; }
}
protected void readyForNextImage() {
final StreamConfigurationMap map = if (postInferenceCallback != null) {
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); postInferenceCallback.run();
}
if (map == null) { }
continue;
} protected int getScreenOrientation() {
switch (getWindowManager().getDefaultDisplay().getRotation()) {
// Fallback to camera1 API for internal cameras that don't have full support. case Surface.ROTATION_270:
// This should help with legacy situations where using the camera2 API causes return 270;
// distorted or otherwise broken previews. case Surface.ROTATION_180:
useCamera2API = return 180;
(facing == CameraCharacteristics.LENS_FACING_EXTERNAL) case Surface.ROTATION_90:
|| isHardwareLevelSupported( return 90;
characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL); default:
LOGGER.i("Camera API lv2?: %s", useCamera2API); return 0;
return cameraId; }
} }
} catch (CameraAccessException e) {
LOGGER.e(e, "Not allowed to access camera"); @Override
} public void onClick(View v) {
if (v.getId() == R.id.plus) {
return null; String threads = threadsTextView.getText().toString().trim();
} int numThreads = Integer.parseInt(threads);
if (numThreads >= 9)
protected void setFragment() { return;
String cameraId = chooseCamera(); numThreads++;
threadsTextView.setText(String.valueOf(numThreads));
Fragment fragment; setNumThreads(numThreads);
if (useCamera2API) { } else if (v.getId() == R.id.minus) {
CameraConnectionFragment camera2Fragment = String threads = threadsTextView.getText().toString().trim();
CameraConnectionFragment.newInstance( int numThreads = Integer.parseInt(threads);
new CameraConnectionFragment.ConnectionCallback() { if (numThreads == 1) {
@Override return;
public void onPreviewSizeChosen(final Size size, final int rotation) { }
previewHeight = size.getHeight(); numThreads--;
previewWidth = size.getWidth(); threadsTextView.setText(String.valueOf(numThreads));
CameraActivity.this.onPreviewSizeChosen(size, rotation); setNumThreads(numThreads);
} }
}, }
this,
getLayoutId(), protected void showFrameInfo(String frameInfo) {
getDesiredPreviewFrameSize()); frameValueTextView.setText(frameInfo);
}
camera2Fragment.setCamera(cameraId);
fragment = camera2Fragment; protected void showCropInfo(String cropInfo) {
} else { cropValueTextView.setText(cropInfo);
fragment = }
new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
} protected void showInference(String inferenceTime) {
inferenceTimeTextView.setText(inferenceTime);
getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit(); }
}
protected abstract void updateActiveModel();
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
// Because of the variable row stride it's not possible to know in protected abstract void processImage();
// advance the actual necessary dimensions of the yuv planes.
for (int i = 0; i < planes.length; ++i) { protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
final ByteBuffer buffer = planes[i].getBuffer();
if (yuvBytes[i] == null) { protected abstract int getLayoutId();
LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
yuvBytes[i] = new byte[buffer.capacity()]; protected abstract Size getDesiredPreviewFrameSize();
}
buffer.get(yuvBytes[i]); protected abstract void setNumThreads(int numThreads);
}
}
public boolean isDebug() {
return debug;
}
protected void readyForNextImage() {
if (postInferenceCallback != null) {
postInferenceCallback.run();
}
}
protected int getScreenOrientation() {
switch (getWindowManager().getDefaultDisplay().getRotation()) {
case Surface.ROTATION_270:
return 270;
case Surface.ROTATION_180:
return 180;
case Surface.ROTATION_90:
return 90;
default:
return 0;
}
}
// @Override
// public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
// setUseNNAPI(isChecked);
// if (isChecked) apiSwitchCompat.setText("NNAPI");
// else apiSwitchCompat.setText("TFLITE");
// }
@Override
public void onClick(View v) {
if (v.getId() == R.id.plus) {
String threads = threadsTextView.getText().toString().trim();
int numThreads = Integer.parseInt(threads);
if (numThreads >= 9) return;
numThreads++;
threadsTextView.setText(String.valueOf(numThreads));
setNumThreads(numThreads);
} else if (v.getId() == R.id.minus) {
String threads = threadsTextView.getText().toString().trim();
int numThreads = Integer.parseInt(threads);
if (numThreads == 1) {
return;
}
numThreads--;
threadsTextView.setText(String.valueOf(numThreads));
setNumThreads(numThreads);
}
}
protected void showFrameInfo(String frameInfo) {
frameValueTextView.setText(frameInfo);
}
protected void showCropInfo(String cropInfo) {
cropValueTextView.setText(cropInfo);
}
protected void showInference(String inferenceTime) {
inferenceTimeTextView.setText(inferenceTime);
}
protected abstract void updateActiveModel();
protected abstract void processImage();
protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
protected abstract int getLayoutId();
protected abstract Size getDesiredPreviewFrameSize();
protected abstract void setNumThreads(int numThreads);
protected abstract void setUseNNAPI(boolean isChecked);
} }
...@@ -23,7 +23,6 @@ import android.app.Dialog; ...@@ -23,7 +23,6 @@ import android.app.Dialog;
import android.app.DialogFragment; import android.app.DialogFragment;
import android.app.Fragment; import android.app.Fragment;
import android.content.Context; import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Configuration; import android.content.res.Configuration;
import android.graphics.ImageFormat; import android.graphics.ImageFormat;
import android.graphics.Matrix; import android.graphics.Matrix;
...@@ -64,506 +63,495 @@ import com.agenew.detection.env.Logger; ...@@ -64,506 +63,495 @@ import com.agenew.detection.env.Logger;
@SuppressLint("ValidFragment") @SuppressLint("ValidFragment")
public class CameraConnectionFragment extends Fragment { public class CameraConnectionFragment extends Fragment {
private static final Logger LOGGER = new Logger(); private static final Logger LOGGER = new Logger();
/** /**
* The camera preview size will be chosen to be the smallest frame by pixel size capable of * The camera preview size will be chosen to be the smallest frame by pixel size
* containing a DESIRED_SIZE x DESIRED_SIZE square. * capable of containing a DESIRED_SIZE x DESIRED_SIZE square.
*/ */
private static final int MINIMUM_PREVIEW_SIZE = 320; private static final int MINIMUM_PREVIEW_SIZE = 320;
/** Conversion from screen rotation to JPEG orientation. */ /** Conversion from screen rotation to JPEG orientation. */
private static final SparseIntArray ORIENTATIONS = new SparseIntArray(); private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final String FRAGMENT_DIALOG = "dialog"; private static final String FRAGMENT_DIALOG = "dialog";
static { static {
ORIENTATIONS.append(Surface.ROTATION_0, 90); ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0); ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270); ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180); ORIENTATIONS.append(Surface.ROTATION_270, 180);
} }
/** A {@link Semaphore} to prevent the app from exiting before closing the camera. */ /**
private final Semaphore cameraOpenCloseLock = new Semaphore(1); * A {@link Semaphore} to prevent the app from exiting before closing the
/** A {@link OnImageAvailableListener} to receive frames as they are available. */ * camera.
private final OnImageAvailableListener imageListener; */
/** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */ private final Semaphore cameraOpenCloseLock = new Semaphore(1);
private final Size inputSize; /**
/** The layout identifier to inflate for this Fragment. */ * A {@link OnImageAvailableListener} to receive frames as they are available.
private final int layout; */
private final OnImageAvailableListener imageListener;
private final ConnectionCallback cameraConnectionCallback; /**
private final CameraCaptureSession.CaptureCallback captureCallback = * The input size in pixels desired by TensorFlow (width and height of a square
new CameraCaptureSession.CaptureCallback() { * bitmap).
@Override */
public void onCaptureProgressed( private final Size inputSize;
final CameraCaptureSession session, /** The layout identifier to inflate for this Fragment. */
final CaptureRequest request, private final int layout;
final CaptureResult partialResult) {}
private final ConnectionCallback cameraConnectionCallback;
@Override private final CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
public void onCaptureCompleted( @Override
final CameraCaptureSession session, public void onCaptureProgressed(final CameraCaptureSession session, final CaptureRequest request,
final CaptureRequest request, final CaptureResult partialResult) {
final TotalCaptureResult result) {} }
};
/** ID of the current {@link CameraDevice}. */ @Override
private String cameraId; public void onCaptureCompleted(final CameraCaptureSession session, final CaptureRequest request,
/** An {@link AutoFitTextureView} for camera preview. */ final TotalCaptureResult result) {
private AutoFitTextureView textureView; }
/** A {@link CameraCaptureSession } for camera preview. */ };
private CameraCaptureSession captureSession; /** ID of the current {@link CameraDevice}. */
/** A reference to the opened {@link CameraDevice}. */ private String cameraId;
private CameraDevice cameraDevice; /** An {@link AutoFitTextureView} for camera preview. */
/** The rotation in degrees of the camera sensor from the display. */ private AutoFitTextureView textureView;
private Integer sensorOrientation; /** A {@link CameraCaptureSession } for camera preview. */
/** The {@link Size} of camera preview. */ private CameraCaptureSession captureSession;
private Size previewSize; /** A reference to the opened {@link CameraDevice}. */
/** An additional thread for running tasks that shouldn't block the UI. */ private CameraDevice cameraDevice;
private HandlerThread backgroundThread; /** The rotation in degrees of the camera sensor from the display. */
/** A {@link Handler} for running tasks in the background. */ private Integer sensorOrientation;
private Handler backgroundHandler; /** The {@link Size} of camera preview. */
/** An {@link ImageReader} that handles preview frame capture. */ private Size previewSize;
private ImageReader previewReader; /** An additional thread for running tasks that shouldn't block the UI. */
/** {@link CaptureRequest.Builder} for the camera preview */ private HandlerThread backgroundThread;
private CaptureRequest.Builder previewRequestBuilder; /** A {@link Handler} for running tasks in the background. */
/** {@link CaptureRequest} generated by {@link #previewRequestBuilder} */ private Handler backgroundHandler;
private CaptureRequest previewRequest; /** An {@link ImageReader} that handles preview frame capture. */
/** {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state. */ private ImageReader previewReader;
private final CameraDevice.StateCallback stateCallback = /** {@link CaptureRequest.Builder} for the camera preview */
new CameraDevice.StateCallback() { private CaptureRequest.Builder previewRequestBuilder;
@Override /** {@link CaptureRequest} generated by {@link #previewRequestBuilder} */
public void onOpened(final CameraDevice cd) { private CaptureRequest previewRequest;
// This method is called when the camera is opened. We start camera preview here. /**
cameraOpenCloseLock.release(); * {@link CameraDevice.StateCallback} is called when {@link CameraDevice}
cameraDevice = cd; * changes its state.
createCameraPreviewSession(); */
} private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
@Override public void onOpened(final CameraDevice cd) {
public void onDisconnected(final CameraDevice cd) { // This method is called when the camera is opened. We start camera preview
cameraOpenCloseLock.release(); // here.
cd.close(); cameraOpenCloseLock.release();
cameraDevice = null; cameraDevice = cd;
} createCameraPreviewSession();
}
@Override
public void onError(final CameraDevice cd, final int error) { @Override
cameraOpenCloseLock.release(); public void onDisconnected(final CameraDevice cd) {
cd.close(); cameraOpenCloseLock.release();
cameraDevice = null; cd.close();
final Activity activity = getActivity(); cameraDevice = null;
if (null != activity) { }
activity.finish();
} @Override
} public void onError(final CameraDevice cd, final int error) {
}; cameraOpenCloseLock.release();
/** cd.close();
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link cameraDevice = null;
* TextureView}. final Activity activity = getActivity();
*/ if (null != activity) {
private final TextureView.SurfaceTextureListener surfaceTextureListener = activity.finish();
new TextureView.SurfaceTextureListener() { }
@Override }
public void onSurfaceTextureAvailable( };
final SurfaceTexture texture, final int width, final int height) { /**
openCamera(width, height); * {@link TextureView.SurfaceTextureListener} handles several lifecycle events
} * on a {@link TextureView}.
*/
@Override private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
public void onSurfaceTextureSizeChanged( @Override
final SurfaceTexture texture, final int width, final int height) { public void onSurfaceTextureAvailable(final SurfaceTexture texture, final int width, final int height) {
configureTransform(width, height); openCamera(width, height);
} }
@Override @Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) { public void onSurfaceTextureSizeChanged(final SurfaceTexture texture, final int width, final int height) {
return true; configureTransform(width, height);
} }
@Override @Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {} public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
}; return true;
}
private CameraConnectionFragment(
final ConnectionCallback connectionCallback, @Override
final OnImageAvailableListener imageListener, public void onSurfaceTextureUpdated(final SurfaceTexture texture) {
final int layout, }
final Size inputSize) { };
this.cameraConnectionCallback = connectionCallback;
this.imageListener = imageListener; private CameraConnectionFragment(final ConnectionCallback connectionCallback,
this.layout = layout; final OnImageAvailableListener imageListener, final int layout, final Size inputSize) {
this.inputSize = inputSize; this.cameraConnectionCallback = connectionCallback;
} this.imageListener = imageListener;
this.layout = layout;
/** this.inputSize = inputSize;
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose }
* width and height are at least as large as the minimum of both, or an exact match if possible.
* /**
* @param choices The list of sizes that the camera supports for the intended output class * Given {@code choices} of {@code Size}s supported by a camera, chooses the
* @param width The minimum desired width * smallest one whose width and height are at least as large as the minimum of
* @param height The minimum desired height * both, or an exact match if possible.
* @return The optimal {@code Size}, or an arbitrary one if none were big enough *
*/ * @param choices The list of sizes that the camera supports for the intended
protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) { * output class
final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE); * @param width The minimum desired width
final Size desiredSize = new Size(width, height); * @param height The minimum desired height
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
// Collect the supported resolutions that are at least as big as the preview Surface */
boolean exactSizeFound = false; protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
final List<Size> bigEnough = new ArrayList<Size>(); final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
final List<Size> tooSmall = new ArrayList<Size>(); final Size desiredSize = new Size(width, height);
for (final Size option : choices) {
if (option.equals(desiredSize)) { // Collect the supported resolutions that are at least as big as the preview
// Set the size but don't return yet so that remaining sizes will still be logged. // Surface
exactSizeFound = true; boolean exactSizeFound = false;
} final List<Size> bigEnough = new ArrayList<Size>();
final List<Size> tooSmall = new ArrayList<Size>();
if (option.getHeight() >= minSize && option.getWidth() >= minSize) { for (final Size option : choices) {
bigEnough.add(option); if (option.equals(desiredSize)) {
} else { // Set the size but don't return yet so that remaining sizes will still be
tooSmall.add(option); // logged.
} exactSizeFound = true;
} }
LOGGER.i("Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize); if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
LOGGER.i("Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]"); bigEnough.add(option);
LOGGER.i("Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]"); } else {
tooSmall.add(option);
if (exactSizeFound) { }
LOGGER.i("Exact size match found."); }
return desiredSize;
} LOGGER.i("Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
LOGGER.i("Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
// Pick the smallest of those, assuming we found any LOGGER.i("Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");
if (bigEnough.size() > 0) {
final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea()); if (exactSizeFound) {
LOGGER.i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight()); LOGGER.i("Exact size match found.");
return chosenSize; return desiredSize;
} else { }
LOGGER.e("Couldn't find any suitable preview size");
return choices[0]; // Pick the smallest of those, assuming we found any
} if (bigEnough.size() > 0) {
} final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
LOGGER.i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
public static CameraConnectionFragment newInstance( return chosenSize;
final ConnectionCallback callback, } else {
final OnImageAvailableListener imageListener, LOGGER.e("Couldn't find any suitable preview size");
final int layout, return choices[0];
final Size inputSize) { }
return new CameraConnectionFragment(callback, imageListener, layout, inputSize); }
}
public static CameraConnectionFragment newInstance(final ConnectionCallback callback,
/** final OnImageAvailableListener imageListener, final int layout, final Size inputSize) {
* Shows a {@link Toast} on the UI thread. return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
* }
* @param text The message to show
*/ /**
private void showToast(final String text) { * Shows a {@link Toast} on the UI thread.
final Activity activity = getActivity(); *
if (activity != null) { * @param text The message to show
activity.runOnUiThread( */
new Runnable() { private void showToast(final String text) {
@Override final Activity activity = getActivity();
public void run() { if (activity != null) {
Toast.makeText(activity, text, Toast.LENGTH_SHORT).show(); activity.runOnUiThread(new Runnable() {
} @Override
}); public void run() {
} Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
} }
});
@Override }
public View onCreateView( }
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false); @Override
} public View onCreateView(final LayoutInflater inflater, final ViewGroup container,
final Bundle savedInstanceState) {
@Override return inflater.inflate(layout, container, false);
public void onViewCreated(final View view, final Bundle savedInstanceState) { }
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
} @Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
@Override textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
public void onActivityCreated(final Bundle savedInstanceState) { }
super.onActivityCreated(savedInstanceState);
} @Override
public void onActivityCreated(final Bundle savedInstanceState) {
@Override super.onActivityCreated(savedInstanceState);
public void onResume() { }
super.onResume();
startBackgroundThread(); @Override
public void onResume() {
// When the screen is turned off and turned back on, the SurfaceTexture is already super.onResume();
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open startBackgroundThread();
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener). // When the screen is turned off and turned back on, the SurfaceTexture is
if (textureView.isAvailable()) { // already
openCamera(textureView.getWidth(), textureView.getHeight()); // available, and "onSurfaceTextureAvailable" will not be called. In that case,
} else { // we can open
textureView.setSurfaceTextureListener(surfaceTextureListener); // a camera and start preview from here (otherwise, we wait until the surface is
} // ready in
} // the SurfaceTextureListener).
if (textureView.isAvailable()) {
@Override openCamera(textureView.getWidth(), textureView.getHeight());
public void onPause() { } else {
closeCamera(); textureView.setSurfaceTextureListener(surfaceTextureListener);
stopBackgroundThread(); }
super.onPause(); }
}
@Override
public void setCamera(String cameraId) { public void onPause() {
this.cameraId = cameraId; closeCamera();
} stopBackgroundThread();
super.onPause();
/** Sets up member variables related to camera. */ }
private void setUpCameraOutputs() {
final Activity activity = getActivity(); public void setCamera(String cameraId) {
final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); this.cameraId = cameraId;
try { }
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
/** Sets up member variables related to camera. */
final StreamConfigurationMap map = private void setUpCameraOutputs() {
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); final Activity activity = getActivity();
final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); try {
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of final StreamConfigurationMap map = characteristics
// garbage capture data. .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
previewSize =
chooseOptimalSize( sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
map.getOutputSizes(SurfaceTexture.class),
inputSize.getWidth(), // Danger, W.R.! Attempting to use too large a preview size could exceed the
inputSize.getHeight()); // camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// We fit the aspect ratio of TextureView to the size of preview we picked. // garbage capture data.
final int orientation = getResources().getConfiguration().orientation; previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), inputSize.getWidth(),
if (orientation == Configuration.ORIENTATION_LANDSCAPE) { inputSize.getHeight());
textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
} else { // We fit the aspect ratio of TextureView to the size of preview we picked.
textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth()); final int orientation = getResources().getConfiguration().orientation;
} if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
} catch (final CameraAccessException e) { textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
LOGGER.e(e, "Exception!"); } else {
} catch (final NullPointerException e) { textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
// Currently an NPE is thrown when the Camera2API is used but not supported on the }
// device this code runs. } catch (final CameraAccessException e) {
ErrorDialog.newInstance(getString(R.string.tfe_od_camera_error)) LOGGER.e(e, "Exception!");
.show(getChildFragmentManager(), FRAGMENT_DIALOG); } catch (final NullPointerException e) {
throw new IllegalStateException(getString(R.string.tfe_od_camera_error)); // Currently an NPE is thrown when the Camera2API is used but not supported on
} // the
// device this code runs.
cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation); ErrorDialog.newInstance(getString(R.string.tfe_od_camera_error)).show(getChildFragmentManager(),
} FRAGMENT_DIALOG);
throw new IllegalStateException(getString(R.string.tfe_od_camera_error));
/** Opens the camera specified by {@link CameraConnectionFragment#cameraId}. */ }
private void openCamera(final int width, final int height) {
setUpCameraOutputs(); cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation);
configureTransform(width, height); }
final Activity activity = getActivity();
final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE); /** Opens the camera specified by {@link CameraConnectionFragment#cameraId}. */
try { private void openCamera(final int width, final int height) {
if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) { setUpCameraOutputs();
throw new RuntimeException("Time out waiting to lock camera opening."); configureTransform(width, height);
} final Activity activity = getActivity();
manager.openCamera(cameraId, stateCallback, backgroundHandler); final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
} catch (final CameraAccessException e) { try {
LOGGER.e(e, "Exception!"); if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
} catch (final InterruptedException e) { throw new RuntimeException("Time out waiting to lock camera opening.");
throw new RuntimeException("Interrupted while trying to lock camera opening.", e); }
} manager.openCamera(cameraId, stateCallback, backgroundHandler);
} } catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
/** Closes the current {@link CameraDevice}. */ } catch (final InterruptedException e) {
private void closeCamera() { throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
try { }
cameraOpenCloseLock.acquire(); }
if (null != captureSession) {
captureSession.close(); /** Closes the current {@link CameraDevice}. */
captureSession = null; private void closeCamera() {
} try {
if (null != cameraDevice) { cameraOpenCloseLock.acquire();
cameraDevice.close(); if (null != captureSession) {
cameraDevice = null; captureSession.close();
} captureSession = null;
if (null != previewReader) { }
previewReader.close(); if (null != cameraDevice) {
previewReader = null; cameraDevice.close();
} cameraDevice = null;
} catch (final InterruptedException e) { }
			if (null != previewReader) {
				previewReader.close();
				previewReader = null;
			}
		} catch (final InterruptedException e) {
			throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
		} finally {
			cameraOpenCloseLock.release();
		}
	}

	/** Starts a background thread and its {@link Handler}. */
	private void startBackgroundThread() {
		backgroundThread = new HandlerThread("ImageListener");
		backgroundThread.start();
		backgroundHandler = new Handler(backgroundThread.getLooper());
	}

	/** Stops the background thread and its {@link Handler}. */
	private void stopBackgroundThread() {
		backgroundThread.quitSafely();
		try {
			backgroundThread.join();
			backgroundThread = null;
			backgroundHandler = null;
		} catch (final InterruptedException e) {
			LOGGER.e(e, "Exception!");
		}
	}

	/** Creates a new {@link CameraCaptureSession} for camera preview. */
	private void createCameraPreviewSession() {
		try {
			final SurfaceTexture texture = textureView.getSurfaceTexture();
			assert texture != null;

			// We configure the size of default buffer to be the size of camera preview we want.
			texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());

			// This is the output Surface we need to start preview.
			final Surface surface = new Surface(texture);

			// We set up a CaptureRequest.Builder with the output Surface.
			previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
			previewRequestBuilder.addTarget(surface);

			LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());

			// Create the reader for the preview frames.
			previewReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(),
					ImageFormat.YUV_420_888, 2);

			previewReader.setOnImageAvailableListener(imageListener, backgroundHandler);
			previewRequestBuilder.addTarget(previewReader.getSurface());

			// Here, we create a CameraCaptureSession for camera preview.
			cameraDevice.createCaptureSession(Arrays.asList(surface, previewReader.getSurface()),
					new CameraCaptureSession.StateCallback() {

						@Override
						public void onConfigured(final CameraCaptureSession cameraCaptureSession) {
							// The camera is already closed
							if (null == cameraDevice) {
								return;
							}

							// When the session is ready, we start displaying the preview.
							captureSession = cameraCaptureSession;
							try {
								// Auto focus should be continuous for camera preview.
								previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
										CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
								// Flash is automatically enabled when necessary.
								previewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
										CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);

								// Finally, we start displaying the camera preview.
								previewRequest = previewRequestBuilder.build();
								captureSession.setRepeatingRequest(previewRequest, captureCallback, backgroundHandler);
							} catch (final CameraAccessException e) {
								LOGGER.e(e, "Exception!");
							}
						}

						@Override
						public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
							showToast("Failed");
						}
					}, null);
		} catch (final CameraAccessException e) {
			LOGGER.e(e, "Exception!");
		}
	}

	/**
	 * Configures the necessary {@link Matrix} transformation to `mTextureView`.
	 * This method should be called after the camera preview size is determined in
	 * setUpCameraOutputs and also the size of `mTextureView` is fixed.
	 *
	 * @param viewWidth  The width of `mTextureView`
	 * @param viewHeight The height of `mTextureView`
	 */
	private void configureTransform(final int viewWidth, final int viewHeight) {
		final Activity activity = getActivity();
		if (null == textureView || null == previewSize || null == activity) {
			return;
		}
		final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
		final Matrix matrix = new Matrix();
		final RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
		final RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
		final float centerX = viewRect.centerX();
		final float centerY = viewRect.centerY();
		if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
			bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
			matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
			final float scale = Math.max((float) viewHeight / previewSize.getHeight(),
					(float) viewWidth / previewSize.getWidth());
			matrix.postScale(scale, scale, centerX, centerY);
			matrix.postRotate(90 * (rotation - 2), centerX, centerY);
		} else if (Surface.ROTATION_180 == rotation) {
			matrix.postRotate(180, centerX, centerY);
		}
		textureView.setTransform(matrix);
	}

	/**
	 * Callback for Activities to use to initialize their data once the selected
	 * preview size is known.
	 */
	public interface ConnectionCallback {
		void onPreviewSizeChosen(Size size, int cameraRotation);
	}

	/** Compares two {@code Size}s based on their areas. */
	static class CompareSizesByArea implements Comparator<Size> {
		@Override
		public int compare(final Size lhs, final Size rhs) {
			// We cast here to ensure the multiplications won't overflow
			return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
		}
	}

	/** Shows an error message dialog. */
	public static class ErrorDialog extends DialogFragment {
		private static final String ARG_MESSAGE = "message";

		public static ErrorDialog newInstance(final String message) {
			final ErrorDialog dialog = new ErrorDialog();
			final Bundle args = new Bundle();
			args.putString(ARG_MESSAGE, message);
			dialog.setArguments(args);
			return dialog;
		}

		@Override
		public Dialog onCreateDialog(final Bundle savedInstanceState) {
			final Activity activity = getActivity();
			return new AlertDialog.Builder(activity).setMessage(getArguments().getString(ARG_MESSAGE))
					.setPositiveButton(android.R.string.ok, (dialogInterface, i) -> activity.finish()).create();
		}
	}
}
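As a side note on the two-buffer ImageReader configured above: a listener must close every Image it acquires, or the queue fills and the repeating preview request stalls. A minimal sketch of such a listener, not part of this commit (the class name PreviewFrameCounter is invented for illustration; the real code installs its own imageListener):

// Illustrative sketch only, not part of this commit.
class PreviewFrameCounter implements android.media.ImageReader.OnImageAvailableListener {
	private long frameCount = 0;

	@Override
	public void onImageAvailable(final android.media.ImageReader reader) {
		// acquireLatestImage() skips stale frames; null means no frame is ready yet.
		final android.media.Image image = reader.acquireLatestImage();
		if (image == null) {
			return;
		}
		try {
			frameCount++; // a real listener would read the YUV planes here
		} finally {
			image.close(); // mandatory: returns one of the reader's two buffers
		}
	}
}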
@@ -37,163 +37,164 @@ import com.agenew.detection.env.ImageUtils;
import com.agenew.detection.env.Logger;

public class LegacyCameraConnectionFragment extends Fragment {
	private static final Logger LOGGER = new Logger();

	/** Conversion from screen rotation to JPEG orientation. */
	private static final SparseIntArray ORIENTATIONS = new SparseIntArray();

	static {
		ORIENTATIONS.append(Surface.ROTATION_0, 90);
		ORIENTATIONS.append(Surface.ROTATION_90, 0);
		ORIENTATIONS.append(Surface.ROTATION_180, 270);
		ORIENTATIONS.append(Surface.ROTATION_270, 180);
	}

	private Camera camera;
	private Camera.PreviewCallback imageListener;
	private Size desiredSize;

	/** The layout identifier to inflate for this Fragment. */
	private int layout;

	/** An {@link AutoFitTextureView} for camera preview. */
	private AutoFitTextureView textureView;

	/**
	 * {@link TextureView.SurfaceTextureListener} handles several lifecycle events
	 * on a {@link TextureView}.
	 */
	private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
		@Override
		public void onSurfaceTextureAvailable(final SurfaceTexture texture, final int width, final int height) {

			int index = getCameraId();
			camera = Camera.open(index);

			try {
				Camera.Parameters parameters = camera.getParameters();
				List<String> focusModes = parameters.getSupportedFocusModes();
				if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
					parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
				}
				List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
				Size[] sizes = new Size[cameraSizes.size()];
				int i = 0;
				for (Camera.Size size : cameraSizes) {
					sizes[i++] = new Size(size.width, size.height);
				}
				Size previewSize = CameraConnectionFragment.chooseOptimalSize(sizes, desiredSize.getWidth(),
						desiredSize.getHeight());
				parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
				camera.setDisplayOrientation(90);
				camera.setParameters(parameters);
				camera.setPreviewTexture(texture);
			} catch (IOException exception) {
				camera.release();
			}

			camera.setPreviewCallbackWithBuffer(imageListener);
			Camera.Size s = camera.getParameters().getPreviewSize();
			camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);

			textureView.setAspectRatio(s.height, s.width);

			camera.startPreview();
		}

		@Override
		public void onSurfaceTextureSizeChanged(final SurfaceTexture texture, final int width, final int height) {
		}

		@Override
		public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
			return true;
		}

		@Override
		public void onSurfaceTextureUpdated(final SurfaceTexture texture) {
		}
	};

	/** An additional thread for running tasks that shouldn't block the UI. */
	private HandlerThread backgroundThread;

	public LegacyCameraConnectionFragment(final Camera.PreviewCallback imageListener, final int layout,
			final Size desiredSize) {
		this.imageListener = imageListener;
		this.layout = layout;
		this.desiredSize = desiredSize;
	}

	@Override
	public View onCreateView(final LayoutInflater inflater, final ViewGroup container,
			final Bundle savedInstanceState) {
		return inflater.inflate(layout, container, false);
	}

	@Override
	public void onViewCreated(final View view, final Bundle savedInstanceState) {
		textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
	}

	@Override
	public void onActivityCreated(final Bundle savedInstanceState) {
		super.onActivityCreated(savedInstanceState);
	}

	@Override
	public void onResume() {
		super.onResume();
		startBackgroundThread();
		// When the screen is turned off and turned back on, the SurfaceTexture is
		// already available, and "onSurfaceTextureAvailable" will not be called. In
		// that case, we can open a camera and start preview from here (otherwise, we
		// wait until the surface is ready in the SurfaceTextureListener).
		if (textureView.isAvailable()) {
			camera.startPreview();
		} else {
			textureView.setSurfaceTextureListener(surfaceTextureListener);
		}
	}

	@Override
	public void onPause() {
		stopCamera();
		stopBackgroundThread();
		super.onPause();
	}

	/** Starts a background thread and its {@link Handler}. */
	private void startBackgroundThread() {
		backgroundThread = new HandlerThread("CameraBackground");
		backgroundThread.start();
	}

	/** Stops the background thread and its {@link Handler}. */
	private void stopBackgroundThread() {
		backgroundThread.quitSafely();
		try {
			backgroundThread.join();
			backgroundThread = null;
		} catch (final InterruptedException e) {
			LOGGER.e(e, "Exception!");
		}
	}

	protected void stopCamera() {
		if (camera != null) {
			camera.stopPreview();
			camera.setPreviewCallback(null);
			camera.release();
			camera = null;
		}
	}

	private int getCameraId() {
		CameraInfo ci = new CameraInfo();
		for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
			Camera.getCameraInfo(i, ci);
			if (ci.facing == CameraInfo.CAMERA_FACING_BACK)
				return i;
		}
		return -1; // No camera found
	}
}
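The fragment above delegates its preview-size choice to CameraConnectionFragment.chooseOptimalSize(), whose body is outside this diff. A hypothetical stand-in consistent with how it is called here, choosing the smallest supported size that still covers the requested dimensions (the real implementation may differ):

// Hypothetical stand-in, not the repo's actual chooseOptimalSize().
static android.util.Size chooseOptimalSize(final android.util.Size[] choices, final int desiredWidth,
		final int desiredHeight) {
	final java.util.Comparator<android.util.Size> byArea = (a, b) -> Long
			.signum((long) a.getWidth() * a.getHeight() - (long) b.getWidth() * b.getHeight());
	final java.util.List<android.util.Size> bigEnough = new java.util.ArrayList<>();
	for (final android.util.Size option : choices) {
		if (option.getWidth() >= desiredWidth && option.getHeight() >= desiredHeight) {
			bigEnough.add(option);
		}
	}
	// Prefer the smallest size that covers the request; otherwise take the largest.
	return bigEnough.isEmpty() ? java.util.Collections.max(java.util.Arrays.asList(choices), byArea)
			: java.util.Collections.min(bigEnough, byArea);
}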
@@ -47,118 +47,111 @@ import com.agenew.detection.tflite.YoloV5Classifier;
import com.agenew.detection.tracking.MultiBoxTracker;

/**
 * An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to
 * detect and then track objects.
 */
public class MainActivity extends CameraActivity implements OnImageAvailableListener {
	private static final Logger LOGGER = new Logger();

	private static final DetectorMode MODE = DetectorMode.TF_OD_API;
	public static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.3f;
	private static final boolean MAINTAIN_ASPECT = true;
	private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 640);
	private static final boolean SAVE_PREVIEW_BITMAP = false;
	private static final float TEXT_SIZE_DIP = 10;
	OverlayView trackingOverlay;
	private Integer sensorOrientation;

	private YoloV5Classifier detector;

	private long lastProcessingTimeMs;
	private Bitmap rgbFrameBitmap = null;
	private Bitmap croppedBitmap = null;
	private Bitmap cropCopyBitmap = null;

	private boolean computingDetection = false;

	private long timestamp = 0;

	private Matrix frameToCropTransform;
	private Matrix cropToFrameTransform;

	private MultiBoxTracker tracker;

	private BorderedText borderedText;

	@Override
	public void onPreviewSizeChosen(final Size size, final int rotation) {
		final float textSizePx = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP,
				getResources().getDisplayMetrics());
		borderedText = new BorderedText(textSizePx);
		borderedText.setTypeface(Typeface.MONOSPACE);

		tracker = new MultiBoxTracker(this);

		final int modelIndex = modelView.getCheckedItemPosition();
		final String modelString = modelStrings.get(modelIndex);
		final int deviceIndex = deviceView.getCheckedItemPosition();
		String device = deviceStrings.get(deviceIndex);

		try {
			detector = DetectorFactory.getDetector(getAssets(), modelString);
		} catch (final IOException e) {
			e.printStackTrace();
			LOGGER.e(e, "Exception initializing classifier!");
			Toast toast = Toast.makeText(getApplicationContext(), "Classifier could not be initialized",
					Toast.LENGTH_SHORT);
			toast.show();
			finish();
		}
		if (device.equals("CPU")) {
			detector.useCPU();
		} else if (device.equals("GPU")) {
			detector.useGpu();
		} else if (device.equals("NNAPI")) {
			detector.useNNAPI();
		}
		int cropSize = detector.getInputSize();

		previewWidth = size.getWidth();
		previewHeight = size.getHeight();

		sensorOrientation = rotation - getScreenOrientation();
		LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation);

		LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
		rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
		croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);

		frameToCropTransform = ImageUtils.getTransformationMatrix(previewWidth, previewHeight, cropSize, cropSize,
				sensorOrientation, MAINTAIN_ASPECT);

		cropToFrameTransform = new Matrix();
		frameToCropTransform.invert(cropToFrameTransform);

		trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay);
		trackingOverlay.addCallback(new DrawCallback() {
			@Override
			public void drawCallback(final Canvas canvas) {
				tracker.draw(canvas);
				if (isDebug()) {
					tracker.drawDebug(canvas);
				}
			}
		});

		tracker.setFrameConfiguration(previewWidth, previewHeight, sensorOrientation);
	}

	protected void updateActiveModel() {
		// Get UI information before delegating to background
		final int modelIndex = modelView.getCheckedItemPosition();
		final int deviceIndex = deviceView.getCheckedItemPosition();
		String threads = threadsTextView.getText().toString().trim();
		final int numThreads = Integer.parseInt(threads);

		handler.post(() -> {
			if (modelIndex == currentModel && deviceIndex == currentDevice && numThreads == currentNumThreads) {
				return;
			}
			currentModel = modelIndex;
@@ -185,18 +178,15 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
				if (detector == null) {
					return;
				}
			} catch (IOException e) {
				e.printStackTrace();
				LOGGER.e(e, "Exception in updateActiveModel()");
				Toast toast = Toast.makeText(getApplicationContext(), "Classifier could not be initialized",
						Toast.LENGTH_SHORT);
				toast.show();
				finish();
			}
			if (device.equals("CPU")) {
				detector.useCPU();
			} else if (device.equals("GPU")) {
@@ -209,123 +199,110 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
			int cropSize = detector.getInputSize();
			croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
			frameToCropTransform = ImageUtils.getTransformationMatrix(previewWidth, previewHeight, cropSize,
					cropSize, sensorOrientation, MAINTAIN_ASPECT);
			cropToFrameTransform = new Matrix();
			frameToCropTransform.invert(cropToFrameTransform);
		});
	}

	@Override
	protected void processImage() {
		++timestamp;
		final long currTimestamp = timestamp;
		trackingOverlay.postInvalidate();

		// No mutex needed as this method is not reentrant.
		if (computingDetection) {
			readyForNextImage();
			return;
		}
		computingDetection = true;
		LOGGER.i("Preparing image " + currTimestamp + " for detection in bg thread.");

		rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);

		readyForNextImage();

		final Canvas canvas = new Canvas(croppedBitmap);
		canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
		// For examining the actual TF input.
		if (SAVE_PREVIEW_BITMAP) {
			ImageUtils.saveBitmap(croppedBitmap);
		}

		runInBackground(new Runnable() {
			@Override
			public void run() {
				LOGGER.i("Running detection on image " + currTimestamp);
				final long startTime = SystemClock.uptimeMillis();
				final List<Classifier.Recognition> results = detector.recognizeImage(croppedBitmap);
				lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;

				Log.e("CHECK", "run: " + results.size());

				cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
				final Canvas canvas = new Canvas(cropCopyBitmap);
				final Paint paint = new Paint();
				paint.setColor(Color.RED);
				paint.setStyle(Style.STROKE);
				paint.setStrokeWidth(2.0f);

				float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
				switch (MODE) {
				case TF_OD_API:
					minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
					break;
				}

				final List<Classifier.Recognition> mappedRecognitions = new LinkedList<>();

				for (final Classifier.Recognition result : results) {
					final RectF location = result.getLocation();
					if (location != null && result.getConfidence() >= minimumConfidence) {
						canvas.drawRect(location, paint);

						cropToFrameTransform.mapRect(location);

						result.setLocation(location);
						mappedRecognitions.add(result);
					}
				}

				tracker.trackResults(mappedRecognitions, currTimestamp);
				trackingOverlay.postInvalidate();

				computingDetection = false;

				runOnUiThread(() -> {
					showFrameInfo(previewWidth + "x" + previewHeight);
					showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight());
					showInference(lastProcessingTimeMs + "ms");
				});
			}
		});
	}

	@Override
	protected int getLayoutId() {
		return R.layout.tfe_od_camera_connection_fragment_tracking;
	}

	@Override
	protected Size getDesiredPreviewFrameSize() {
		return DESIRED_PREVIEW_SIZE;
	}

	// Which detection model to use: by default uses Tensorflow Object Detection
	// API frozen checkpoints.
	private enum DetectorMode {
		TF_OD_API;
	}

	@Override
	protected void setNumThreads(final int numThreads) {
		runInBackground(() -> detector.setNumThreads(numThreads));
	}
}
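One step in processImage() deserves a spelled-out example: the detector reports boxes in crop-space (its square input), and cropToFrameTransform, the inverse of frameToCropTransform, maps each box back to preview coordinates. A sketch under assumed sizes (1280x720 preview, 640 crop, 90-degree rotation; class and method names are illustrative):

// Illustrative only; mirrors the matrix pair built in onPreviewSizeChosen().
final class BoxMappingDemo {
	static android.graphics.RectF mapToPreview() {
		final android.graphics.Matrix frameToCrop = com.agenew.detection.env.ImageUtils
				.getTransformationMatrix(1280, 720, 640, 640, 90, true);
		final android.graphics.Matrix cropToFrame = new android.graphics.Matrix();
		frameToCrop.invert(cropToFrame);

		// A box reported by the detector in crop (model-input) coordinates...
		final android.graphics.RectF location = new android.graphics.RectF(100, 100, 300, 300);
		// ...lands in preview-frame coordinates after the inverse mapping.
		cropToFrame.mapRect(location);
		return location;
	}
}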
@@ -22,51 +22,52 @@ import android.view.TextureView;

/** A {@link TextureView} that can be adjusted to a specified aspect ratio. */
public class AutoFitTextureView extends TextureView {
	private int ratioWidth = 0;
	private int ratioHeight = 0;

	public AutoFitTextureView(final Context context) {
		this(context, null);
	}

	public AutoFitTextureView(final Context context, final AttributeSet attrs) {
		this(context, attrs, 0);
	}

	public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
		super(context, attrs, defStyle);
	}

	/**
	 * Sets the aspect ratio for this view. The size of the view will be measured
	 * based on the ratio calculated from the parameters. Note that the actual sizes
	 * of the parameters don't matter, that is, calling setAspectRatio(2, 3) and
	 * setAspectRatio(4, 6) produce the same result.
	 *
	 * @param width  Relative horizontal size
	 * @param height Relative vertical size
	 */
	public void setAspectRatio(final int width, final int height) {
		if (width < 0 || height < 0) {
			throw new IllegalArgumentException("Size cannot be negative.");
		}
		ratioWidth = width;
		ratioHeight = height;
		requestLayout();
	}

	@Override
	protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
		super.onMeasure(widthMeasureSpec, heightMeasureSpec);
		final int width = MeasureSpec.getSize(widthMeasureSpec);
		final int height = MeasureSpec.getSize(heightMeasureSpec);
		if (0 == ratioWidth || 0 == ratioHeight) {
			setMeasuredDimension(width, height);
		} else {
			if (width < height * ratioWidth / ratioHeight) {
				setMeasuredDimension(width, width * ratioHeight / ratioWidth);
			} else {
				setMeasuredDimension(height * ratioWidth / ratioHeight, height);
			}
		}
	}
}
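To make the onMeasure() arithmetic concrete: after setAspectRatio(3, 4), a measure pass offering 1080x1920 takes the first branch because 1080 < 1920 * 3 / 4 = 1440, so the view settles at 1080x1440 (1080 * 4 / 3), filling the offered width while keeping the 3:4 ratio.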
@@ -24,25 +24,25 @@ import java.util.List;

/** A simple View providing a render callback to other classes. */
public class OverlayView extends View {
	private final List<DrawCallback> callbacks = new LinkedList<DrawCallback>();

	public OverlayView(final Context context, final AttributeSet attrs) {
		super(context, attrs);
	}

	public void addCallback(final DrawCallback callback) {
		callbacks.add(callback);
	}

	@Override
	public synchronized void draw(final Canvas canvas) {
		for (final DrawCallback callback : callbacks) {
			callback.drawCallback(canvas);
		}
	}

	/** Interface defining the callback for client classes. */
	public interface DrawCallback {
		public void drawCallback(final Canvas canvas);
	}
}
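Since DrawCallback has a single method, callers can register layers with a lambda; draw() replays every callback in registration order. A hypothetical usage sketch:

// Illustrative only: layering a callback on the overlay from an Activity.
void attachDebugLayer(final OverlayView overlay) {
	overlay.addCallback(canvas -> {
		// Keep per-frame drawing cheap; this runs on every draw() pass.
	});
	overlay.postInvalidate(); // safe from any thread; schedules a redraw
}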
@@ -18,111 +18,71 @@ package com.agenew.detection.env;

import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Typeface;

/**
 * A class that encapsulates the tedious bits of rendering legible, bordered
 * text onto a canvas.
 */
public class BorderedText {
	private final Paint interiorPaint;
	private final Paint exteriorPaint;

	/**
	 * Creates a left-aligned bordered text object with a white interior, and a
	 * black exterior with the specified text size.
	 *
	 * @param textSize text size in pixels
	 */
	public BorderedText(final float textSize) {
		this(Color.WHITE, Color.BLACK, textSize);
	}

	/**
	 * Create a bordered text object with the specified interior and exterior
	 * colors, text size and alignment.
	 *
	 * @param interiorColor the interior text color
	 * @param exteriorColor the exterior text color
	 * @param textSize      text size in pixels
	 */
	public BorderedText(final int interiorColor, final int exteriorColor, final float textSize) {
		interiorPaint = new Paint();
		interiorPaint.setTextSize(textSize);
		interiorPaint.setColor(interiorColor);
		interiorPaint.setStyle(Style.FILL);
		interiorPaint.setAntiAlias(false);
		interiorPaint.setAlpha(255);

		exteriorPaint = new Paint();
		exteriorPaint.setTextSize(textSize);
		exteriorPaint.setColor(exteriorColor);
		exteriorPaint.setStyle(Style.FILL_AND_STROKE);
		exteriorPaint.setStrokeWidth(textSize / 8);
		exteriorPaint.setAntiAlias(false);
		exteriorPaint.setAlpha(255);
	}

	public void setTypeface(Typeface typeface) {
		interiorPaint.setTypeface(typeface);
		exteriorPaint.setTypeface(typeface);
	}

	public void drawText(final Canvas canvas, final float posX, final float posY, final String text) {
		canvas.drawText(text, posX, posY, exteriorPaint);
		canvas.drawText(text, posX, posY, interiorPaint);
	}

	public void drawText(final Canvas canvas, final float posX, final float posY, final String text, Paint bgPaint) {
		float width = exteriorPaint.measureText(text);
		float textSize = exteriorPaint.getTextSize();
		Paint paint = new Paint(bgPaint);
		paint.setStyle(Paint.Style.FILL);
		paint.setAlpha(160);
		canvas.drawRect(posX, (posY + (int) (textSize)), (posX + (int) (width)), posY, paint);

		canvas.drawText(text, posX, (posY + textSize), interiorPaint);
	}
}
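A hypothetical usage sketch exercising both drawText overloads; note that the bgPaint variant fills a translucent background box and shifts the baseline down by one text size:

// Illustrative only: labelling detections on a canvas.
static void drawLabel(final android.graphics.Canvas canvas) {
	final BorderedText text = new BorderedText(24.0f); // white fill, black border
	text.drawText(canvas, 16.0f, 48.0f, "person 87%");

	final android.graphics.Paint bg = new android.graphics.Paint();
	bg.setColor(android.graphics.Color.BLUE);
	text.drawText(canvas, 16.0f, 96.0f, "car 52%", bg); // with background box
}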
@@ -23,197 +23,183 @@ import java.io.FileOutputStream;

/** Utility class for manipulating images. */
public class ImageUtils {
	// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their
	// ranges are normalized to eight bits.
	static final int kMaxChannelValue = 262143;

	private static final Logger LOGGER = new Logger();

	/**
	 * Utility method to compute the allocated size in bytes of a YUV420SP image of
	 * the given dimensions.
	 */
	public static int getYUVByteSize(final int width, final int height) {
		// The luminance plane requires 1 byte per pixel.
		final int ySize = width * height;

		// The UV plane works on 2x2 blocks, so dimensions with odd size must be
		// rounded up. Each 2x2 block takes 2 bytes to encode, one each for U and V.
		final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;

		return ySize + uvSize;
	}

	/**
	 * Saves a Bitmap object to disk for analysis.
	 *
	 * @param bitmap The bitmap to save.
	 */
	public static void saveBitmap(final Bitmap bitmap) {
		saveBitmap(bitmap, "preview.png");
	}

	/**
	 * Saves a Bitmap object to disk for analysis.
	 *
	 * @param bitmap   The bitmap to save.
	 * @param filename The location to save the bitmap to.
	 */
	public static void saveBitmap(final Bitmap bitmap, final String filename) {
		final String root = Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
		LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root);
		final File myDir = new File(root);

		if (!myDir.mkdirs()) {
			LOGGER.i("Make dir failed");
		}

		final String fname = filename;
		final File file = new File(myDir, fname);
		if (file.exists()) {
			file.delete();
		}
		try {
			final FileOutputStream out = new FileOutputStream(file);
			bitmap.compress(Bitmap.CompressFormat.PNG, 99, out);
			out.flush();
			out.close();
		} catch (final Exception e) {
			LOGGER.e(e, "Exception!");
		}
	}

	public static void convertYUV420SPToARGB8888(byte[] input, int width, int height, int[] output) {
		final int frameSize = width * height;
		for (int j = 0, yp = 0; j < height; j++) {
			int uvp = frameSize + (j >> 1) * width;
			int u = 0;
			int v = 0;

			for (int i = 0; i < width; i++, yp++) {
				int y = 0xff & input[yp];
				if ((i & 1) == 0) {
					v = 0xff & input[uvp++];
					u = 0xff & input[uvp++];
				}

				output[yp] = YUV2RGB(y, u, v);
			}
		}
	}

	private static int YUV2RGB(int y, int u, int v) {
		// Adjust and check YUV values
		y = (y - 16) < 0 ? 0 : (y - 16);
		u -= 128;
		v -= 128;

		// This is the floating point equivalent. We do the conversion in integer
		// because some Android devices do not have floating point in hardware.
		// nR = (int)(1.164 * nY + 2.018 * nU);
		// nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
		// nB = (int)(1.164 * nY + 1.596 * nV);
		int y1192 = 1192 * y;
		int r = (y1192 + 1634 * v);
		int g = (y1192 - 833 * v - 400 * u);
		int b = (y1192 + 2066 * u);

		// Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ]
		r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r);
		g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g);
		b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 0 : b);

		return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
	}

	public static void convertYUV420ToARGB8888(byte[] yData, byte[] uData, byte[] vData, int width, int height,
			int yRowStride, int uvRowStride, int uvPixelStride, int[] out) {
		int yp = 0;
		for (int j = 0; j < height; j++) {
			int pY = yRowStride * j;
			int pUV = uvRowStride * (j >> 1);

			for (int i = 0; i < width; i++) {
				int uv_offset = pUV + (i >> 1) * uvPixelStride;

				out[yp++] = YUV2RGB(0xff & yData[pY + i], 0xff & uData[uv_offset], 0xff & vData[uv_offset]);
			}
		}
	}

	/**
	 * Returns a transformation matrix from one reference frame into another.
	 * Handles cropping (if maintaining aspect ratio is desired) and rotation.
	 *
	 * @param srcWidth            Width of source frame.
	 * @param srcHeight           Height of source frame.
	 * @param dstWidth            Width of destination frame.
	 * @param dstHeight           Height of destination frame.
	 * @param applyRotation       Amount of rotation to apply from one frame to
	 *                            another. Must be a multiple of 90.
	 * @param maintainAspectRatio If true, will ensure that scaling in x and y
	 *                            remains constant, cropping the image if necessary.
	 * @return The transformation fulfilling the desired requirements.
	 */
	public static Matrix getTransformationMatrix(final int srcWidth, final int srcHeight, final int dstWidth,
			final int dstHeight, final int applyRotation, final boolean maintainAspectRatio) {
		final Matrix matrix = new Matrix();

		if (applyRotation != 0) {
			if (applyRotation % 90 != 0) {
				LOGGER.w("Rotation of %d % 90 != 0", applyRotation);
			}

			// Translate so center of image is at origin.
			matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);

			// Rotate around origin.
			matrix.postRotate(applyRotation);
		}

		// Account for the already applied rotation, if any, and then determine how
		// much scaling is needed for each axis.
		final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;

		final int inWidth = transpose ? srcHeight : srcWidth;
		final int inHeight = transpose ? srcWidth : srcHeight;

		// Apply scaling if necessary.
		if (inWidth != dstWidth || inHeight != dstHeight) {
			final float scaleFactorX = dstWidth / (float) inWidth;
			final float scaleFactorY = dstHeight / (float) inHeight;

			if (maintainAspectRatio) {
				// Scale by minimum factor so that dst is filled completely while
				// maintaining the aspect ratio. Some image may fall off the edge.
				final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
				matrix.postScale(scaleFactor, scaleFactor);
			} else {
				// Scale exactly to fill dst from src.
				matrix.postScale(scaleFactorX, scaleFactorY);
			}
		}

		if (applyRotation != 0) {
			// Translate back from origin centered reference to destination frame.
			matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
		}

		return matrix;
	}
}
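To see the fixed-point scheme in YUV2RGB() with concrete numbers: 1192 is roughly 1.164 * 1024, so every product sits 10 bits above the final channel value. For a near-white pixel (Y = 235, U = V = 128) the offsets leave y = 219 and zero chroma; y1192 = 1192 * 219 = 261048 stays below kMaxChannelValue (262143), and each channel's mask-and-shift reduces to 261048 >> 10 = 254, yielding ARGB (255, 254, 254, 254).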
...@@ -19,168 +19,131 @@ import android.util.Log;

import java.util.HashSet;
import java.util.Set;

/**
 * Wrapper for the platform log function, allows convenient message prefixing
 * and log disabling.
 */
public final class Logger {
    private static final String DEFAULT_TAG = "tensorflow";
    private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG;

    // Classes to be ignored when examining the stack trace
    private static final Set<String> IGNORED_CLASS_NAMES;

    static {
        IGNORED_CLASS_NAMES = new HashSet<String>(3);
        IGNORED_CLASS_NAMES.add("dalvik.system.VMStack");
        IGNORED_CLASS_NAMES.add("java.lang.Thread");
        IGNORED_CLASS_NAMES.add(Logger.class.getCanonicalName());
    }

    private final String tag;
    private final String messagePrefix;
    private int minLogLevel = DEFAULT_MIN_LOG_LEVEL;

    /**
     * Creates a Logger with a custom tag and a custom message prefix. If the
     * message prefix is set to
     *
     * <pre>
     * null
     * </pre>
     *
     * , the caller's class name is used as the prefix.
     *
     * @param tag           identifies the source of a log message.
     * @param messagePrefix prepended to every message if non-null. If null, the
     *                      name of the caller is being used
     */
    public Logger(final String tag, final String messagePrefix) {
        this.tag = tag;
        final String prefix = messagePrefix == null ? getCallerSimpleName() : messagePrefix;
        this.messagePrefix = (prefix.length() > 0) ? prefix + ": " : prefix;
    }

    /** Creates a Logger using the caller's class name as the message prefix. */
    public Logger() {
        this(DEFAULT_TAG, null);
    }

    /**
     * Return caller's simple name.
     *
     * <p>
     * Android getStackTrace() returns an array that looks like this: stackTrace[0]:
     * dalvik.system.VMStack stackTrace[1]: java.lang.Thread stackTrace[2]:
     * com.google.android.apps.unveil.env.UnveilLogger stackTrace[3]:
     * com.google.android.apps.unveil.BaseApplication
     *
     * <p>
     * This function returns the simple version of the first non-filtered name.
     *
     * @return caller's simple name
     */
    private static String getCallerSimpleName() {
        // Get the current callstack so we can pull the class of the caller off of it.
        final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();

        for (final StackTraceElement elem : stackTrace) {
            final String className = elem.getClassName();
            if (!IGNORED_CLASS_NAMES.contains(className)) {
                // We're only interested in the simple name of the class, not the complete
                // package.
                final String[] classParts = className.split("\\.");
                return classParts[classParts.length - 1];
            }
        }

        return Logger.class.getSimpleName();
    }

    public boolean isLoggable(final int logLevel) {
        return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel);
    }

    private String toMessage(final String format, final Object... args) {
        return messagePrefix + (args.length > 0 ? String.format(format, args) : format);
    }

    public void v(final String format, final Object... args) {
        if (isLoggable(Log.VERBOSE)) {
            Log.v(tag, toMessage(format, args));
        }
    }

    public void d(final String format, final Object... args) {
        if (isLoggable(Log.DEBUG)) {
            Log.d(tag, toMessage(format, args));
        }
    }

    public void i(final String format, final Object... args) {
        if (isLoggable(Log.INFO)) {
            Log.i(tag, toMessage(format, args));
        }
    }

    public void i(final Throwable t, final String format, final Object... args) {
        if (isLoggable(Log.INFO)) {
            Log.i(tag, toMessage(format, args), t);
        }
    }

    public void w(final String format, final Object... args) {
        if (isLoggable(Log.WARN)) {
            Log.w(tag, toMessage(format, args));
        }
    }

    public void e(final String format, final Object... args) {
        if (isLoggable(Log.ERROR)) {
            Log.e(tag, toMessage(format, args));
        }
    }

    public void e(final Throwable t, final String format, final Object... args) {
        if (isLoggable(Log.ERROR)) {
            Log.e(tag, toMessage(format, args), t);
        }
    }
}
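A minimal usage sketch for the trimmed-down Logger (illustrative; the field name and messages are assumptions, not taken from the commit):

    private static final Logger LOGGER = new Logger(); // tag "tensorflow", prefix = caller's class name

    LOGGER.i("Preview size: %dx%d", 640, 480);          // formatted via String.format
    LOGGER.e(new IOException("camera"), "open failed"); // Throwable overload keeps the stack trace

Calls below minLogLevel are dropped unless Log.isLoggable reports the tag as explicitly enabled.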
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package com.agenew.detection.env;
import android.graphics.Bitmap;
import android.text.TextUtils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/** Size class independent of a Camera object. */
public class Size implements Comparable<Size>, Serializable {
// 1.4 went out with this UID so we'll need to maintain it to preserve pending queries when
// upgrading.
public static final long serialVersionUID = 7689808733290872361L;
public final int width;
public final int height;
public Size(final int width, final int height) {
this.width = width;
this.height = height;
}
public Size(final Bitmap bmp) {
this.width = bmp.getWidth();
this.height = bmp.getHeight();
}
/**
* Rotate a size by the given number of degrees.
*
* @param size Size to rotate.
* @param rotation Degrees {0, 90, 180, 270} to rotate the size.
* @return Rotated size.
*/
public static Size getRotatedSize(final Size size, final int rotation) {
if (rotation % 180 != 0) {
// The phone is portrait, therefore the camera is sideways and frame should be rotated.
return new Size(size.height, size.width);
}
return size;
}
public static Size parseFromString(String sizeString) {
if (TextUtils.isEmpty(sizeString)) {
return null;
}
sizeString = sizeString.trim();
// The expected format is "<width>x<height>".
final String[] components = sizeString.split("x");
if (components.length == 2) {
try {
final int width = Integer.parseInt(components[0]);
final int height = Integer.parseInt(components[1]);
return new Size(width, height);
} catch (final NumberFormatException e) {
return null;
}
} else {
return null;
}
}
public static List<Size> sizeStringToList(final String sizes) {
final List<Size> sizeList = new ArrayList<Size>();
if (sizes != null) {
final String[] pairs = sizes.split(",");
for (final String pair : pairs) {
final Size size = Size.parseFromString(pair);
if (size != null) {
sizeList.add(size);
}
}
}
return sizeList;
}
public static String sizeListToString(final List<Size> sizes) {
String sizesString = "";
if (sizes != null && sizes.size() > 0) {
sizesString = sizes.get(0).toString();
for (int i = 1; i < sizes.size(); i++) {
sizesString += "," + sizes.get(i).toString();
}
}
return sizesString;
}
public static final String dimensionsAsString(final int width, final int height) {
return width + "x" + height;
}
public final float aspectRatio() {
return (float) width / (float) height;
}
@Override
public int compareTo(final Size other) {
return width * height - other.width * other.height;
}
@Override
public boolean equals(final Object other) {
if (other == null) {
return false;
}
if (!(other instanceof Size)) {
return false;
}
final Size otherSize = (Size) other;
return (width == otherSize.width && height == otherSize.height);
}
@Override
public int hashCode() {
return width * 32713 + height;
}
@Override
public String toString() {
return dimensionsAsString(width, height);
}
}
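A short usage sketch for the helpers above (illustrative; the literals are assumptions):

    Size parsed = Size.parseFromString("640x480");                // width 640, height 480
    List<Size> sizes = Size.sizeStringToList("640x480,1280x720");
    Collections.sort(sizes);            // compareTo orders ascending by area (width * height)
    String joined = Size.sizeListToString(sizes);                 // "640x480,1280x720"

parseFromString returns null for anything that is not exactly "<width>x<height>", so callers should null-check the result.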
package com.agenew.detection.env;

import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.util.Log;

import java.io.FileInputStream;
import java.io.IOException;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
...@@ -63,149 +54,4 @@ public class Utils {
        return result;
    }
public static void softmax(final float[] vals) {
float max = Float.NEGATIVE_INFINITY;
for (final float val : vals) {
max = Math.max(max, val);
}
float sum = 0.0f;
for (int i = 0; i < vals.length; ++i) {
vals[i] = (float) Math.exp(vals[i] - max);
sum += vals[i];
}
for (int i = 0; i < vals.length; ++i) {
vals[i] = vals[i] / sum;
}
}
public static float expit(final float x) {
return (float) (1. / (1. + Math.exp(-x)));
}
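    // Quick sanity check of the two helpers above (illustrative comment, not
    // original code): softmax({1, 2, 3}) first subtracts the max for numerical
    // stability, giving exp({-2, -1, 0}) = {0.135, 0.368, 1.0}, which
    // normalizes to roughly {0.090, 0.245, 0.665}; and expit(0) == 0.5.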
// public static Bitmap scale(Context context, String filePath) {
// AssetManager assetManager = context.getAssets();
//
// InputStream istr;
// Bitmap bitmap = null;
// try {
// istr = assetManager.open(filePath);
// bitmap = BitmapFactory.decodeStream(istr);
// bitmap = Bitmap.createScaledBitmap(bitmap, MainActivity.TF_OD_API_INPUT_SIZE, MainActivity.TF_OD_API_INPUT_SIZE, false);
// } catch (IOException e) {
// // handle exception
// Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
// }
//
// return bitmap;
// }
public static Bitmap getBitmapFromAsset(Context context, String filePath) {
AssetManager assetManager = context.getAssets();
InputStream istr;
Bitmap bitmap = null;
try {
istr = assetManager.open(filePath);
bitmap = BitmapFactory.decodeStream(istr);
// return bitmap.copy(Bitmap.Config.ARGB_8888,true);
} catch (IOException e) {
// handle exception
Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
}
return bitmap;
}
/**
* Returns a transformation matrix from one reference frame into another.
* Handles cropping (if maintaining aspect ratio is desired) and rotation.
*
* @param srcWidth Width of source frame.
* @param srcHeight Height of source frame.
* @param dstWidth Width of destination frame.
* @param dstHeight Height of destination frame.
* @param applyRotation Amount of rotation to apply from one frame to
* another. Must be a multiple of 90.
* @param maintainAspectRatio If true, will ensure that scaling in x and y
* remains constant, cropping the image if necessary.
* @return The transformation fulfilling the desired requirements.
*/
public static Matrix getTransformationMatrix(final int srcWidth, final int srcHeight, final int dstWidth,
final int dstHeight, final int applyRotation, final boolean maintainAspectRatio) {
final Matrix matrix = new Matrix();
if (applyRotation != 0) {
// Translate so center of image is at origin.
matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
// Rotate around origin.
matrix.postRotate(applyRotation);
}
// Account for the already applied rotation, if any, and then determine how
// much scaling is needed for each axis.
final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
final int inWidth = transpose ? srcHeight : srcWidth;
final int inHeight = transpose ? srcWidth : srcHeight;
// Apply scaling if necessary.
if (inWidth != dstWidth || inHeight != dstHeight) {
final float scaleFactorX = dstWidth / (float) inWidth;
final float scaleFactorY = dstHeight / (float) inHeight;
if (maintainAspectRatio) {
// Scale by minimum factor so that dst is filled completely while
// maintaining the aspect ratio. Some image may fall off the edge.
final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
matrix.postScale(scaleFactor, scaleFactor);
} else {
// Scale exactly to fill dst from src.
matrix.postScale(scaleFactorX, scaleFactorY);
}
}
if (applyRotation != 0) {
// Translate back from origin centered reference to destination frame.
matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
}
return matrix;
}
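    // Illustrative pairing (comment only; the sizes are assumed): a 640x480
    // frame cropped to a 320x320 model input with rotation 90 gives a
    // frame-to-crop matrix, and invert() maps detections back to the frame:
    //   Matrix frameToCrop = getTransformationMatrix(640, 480, 320, 320, 90, true);
    //   Matrix cropToFrame = new Matrix();
    //   frameToCrop.invert(cropToFrame);
    //   cropToFrame.mapPoints(point); // crop coordinates -> frame coordinates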
public static Bitmap processBitmap(Bitmap source, int size) {
int image_height = source.getHeight();
int image_width = source.getWidth();
Bitmap croppedBitmap = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888);
Matrix frameToCropTransformations = getTransformationMatrix(image_width, image_height, size, size, 0, false);
Matrix cropToFrameTransformations = new Matrix();
frameToCropTransformations.invert(cropToFrameTransformations);
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(source, frameToCropTransformations, null);
return croppedBitmap;
}
public static void writeToFile(String data, Context context) {
try {
String baseDir = Environment.getExternalStorageDirectory().getAbsolutePath();
String fileName = "myFile.txt";
File file = new File(baseDir + File.separator + fileName);
FileOutputStream stream = new FileOutputStream(file);
try {
stream.write(data.getBytes());
} finally {
stream.close();
}
} catch (IOException e) {
Log.e("Exception", "File write failed: " + e.toString());
}
}
} }
...@@ -34,8 +34,6 @@ public interface Classifier {

    void setNumThreads(int num_threads);

    abstract float getObjThresh();

    /**
......
...@@ -5,30 +5,19 @@ import android.content.res.AssetManager;

import java.io.IOException;

public class DetectorFactory {
    public static YoloV5Classifier getDetector(final AssetManager assetManager, final String modelFilename)
            throws IOException {
        String labelFilename = null;
        boolean isQuantized = false;
        int inputSize = 0;

        if (modelFilename.endsWith(".tflite")) {
            labelFilename = "file:///android_asset/class.txt";
            isQuantized = modelFilename.endsWith("-int8.tflite");
            inputSize = 640;
        }

        return YoloV5Classifier.create(assetManager, modelFilename, labelFilename, isQuantized, inputSize);
    }
}
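A usage sketch for the simplified factory (illustrative; the filename and the Activity context are assumptions, not from the commit):

    // Any "*.tflite" asset gets the bundled "class.txt" labels and a 640x640
    // input; a "*-int8.tflite" name additionally selects the quantized path.
    YoloV5Classifier detector = DetectorFactory.getDetector(getAssets(), "yolov5s.tflite");

A name that does not end in ".tflite" would leave labelFilename null and fail inside YoloV5Classifier.create, so only TFLite assets should be passed here.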
...@@ -18,7 +18,6 @@ package com.agenew.detection.tflite;

import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.RectF;
import android.util.Log;

//import org.tensorflow.lite.Interpreter;
...@@ -45,473 +44,415 @@ import java.util.Map;

import java.util.PriorityQueue;
import java.util.Vector;

/**
 * Wrapper for frozen detection models trained using the Tensorflow Object
 * Detection API: -
 * https://github.com/tensorflow/models/tree/master/research/object_detection
 * where you can find the training code.
 * <p>
 * To use pretrained models in the API or convert to TF Lite models, please see
 * docs for details: -
 * https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md
 * -
 * https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/running_on_mobile_tensorflowlite.md#running-our-model-on-android
 */
public class YoloV5Classifier implements Classifier {
    private static final String TAG = "YoloV5Classifier";

    /**
     * Initializes a native TensorFlow session for classifying images.
     *
     * @param assetManager  The asset manager to be used to load assets.
     * @param modelFilename The filepath of the model GraphDef protocol buffer.
     * @param labelFilename The filepath of label file for classes.
     * @param isQuantized   Boolean representing model is quantized or not
     */
    public static YoloV5Classifier create(final AssetManager assetManager, final String modelFilename,
            final String labelFilename, final boolean isQuantized, final int inputSize) throws IOException {
        final YoloV5Classifier d = new YoloV5Classifier();

        String actualFilename = labelFilename.split("file:///android_asset/")[1];
        InputStream labelsInput = assetManager.open(actualFilename);
        BufferedReader br = new BufferedReader(new InputStreamReader(labelsInput));
        String line;
        while ((line = br.readLine()) != null) {
            LOGGER.w(line);
            d.labels.add(line);
        }
        br.close();

        try {
            Interpreter.Options options = (new Interpreter.Options());
            options.setNumThreads(NUM_THREADS);
            d.tfliteModel = Utils.loadModelFile(assetManager, modelFilename);
            d.tfLite = new Interpreter(d.tfliteModel, options);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }

        d.isModelQuantized = isQuantized;
        // Pre-allocate buffers.
        int numBytesPerChannel;
        if (isQuantized) {
            numBytesPerChannel = 1; // Quantized
        } else {
            numBytesPerChannel = 4; // Floating point
        }
        d.INPUT_SIZE = inputSize;
        d.imgData = ByteBuffer.allocateDirect(1 * d.INPUT_SIZE * d.INPUT_SIZE * 3 * numBytesPerChannel);
        d.imgData.order(ByteOrder.nativeOrder());
        d.intValues = new int[d.INPUT_SIZE * d.INPUT_SIZE];

        d.output_box = (int) ((Math.pow((inputSize / 32), 2) + Math.pow((inputSize / 16), 2)
                + Math.pow((inputSize / 8), 2)) * 3);
        if (d.isModelQuantized) {
            Tensor inpten = d.tfLite.getInputTensor(0);
            d.inp_scale = inpten.quantizationParams().getScale();
            d.inp_zero_point = inpten.quantizationParams().getZeroPoint();
            Tensor oupten = d.tfLite.getOutputTensor(0);
            d.output_box = oupten.shape()[1];
            d.oup_scale = oupten.quantizationParams().getScale();
            d.oup_zero_point = oupten.quantizationParams().getZeroPoint();
        }

        int[] shape = d.tfLite.getOutputTensor(0).shape();
        int numClass = shape[shape.length - 1] - 5;
        d.numClass = numClass;
        d.outData = ByteBuffer.allocateDirect(d.output_box * (numClass + 5) * numBytesPerChannel);
        d.outData.order(ByteOrder.nativeOrder());
        return d;
    }
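    // Worked instance of the pre-allocation above (illustrative comment, not
    // original code): for inputSize = 640 the strides 8/16/32 give grids of
    // 80x80, 40x40 and 20x20, so with 3 anchors per cell
    //   output_box = (80^2 + 40^2 + 20^2) * 3 = 25200
    // candidate boxes, each carrying numClass + 5 values (x, y, w, h,
    // objectness, then per-class scores), which sizes outData.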
    public int getInputSize() {
        return INPUT_SIZE;
    }

    @Override
    public void enableStatLogging(final boolean logStats) {
    }

    @Override
    public String getStatString() {
        return "";
    }

    @Override
    public void close() {
        tfLite.close();
        tfLite = null;
        /*
         * if (gpuDelegate != null) { gpuDelegate.close(); gpuDelegate = null; }
         */
        if (nnapiDelegate != null) {
            nnapiDelegate.close();
            nnapiDelegate = null;
        }
        tfliteModel = null;
    }

    public void setNumThreads(int num_threads) {
        if (tfLite != null)
            tfLite.setNumThreads(num_threads);
    }

    private void recreateInterpreter() {
        if (tfLite != null) {
            tfLite.close();
            tfLite = new Interpreter(tfliteModel, tfliteOptions);
        }
    }

    public void useGpu() {
        /*
         * if (gpuDelegate == null) { gpuDelegate = new GpuDelegate();
         * tfliteOptions.addDelegate(gpuDelegate); recreateInterpreter(); }
         */
    }

    public void useCPU() {
        recreateInterpreter();
    }

    public void useNNAPI() {
        nnapiDelegate = new NnApiDelegate();
        tfliteOptions.addDelegate(nnapiDelegate);
        recreateInterpreter();
    }

    @Override
    public float getObjThresh() {
        return MainActivity.MINIMUM_CONFIDENCE_TF_OD_API;
    }
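    // Illustrative note (not original code): delegate switching rebuilds the
    // interpreter from the retained tfliteModel/tfliteOptions pair, e.g.
    //   detector.useNNAPI(); // adds an NnApiDelegate, then recreates tfLite
    //   detector.useCPU();   // recreates tfLite without extra delegates
    // After close() has nulled tfliteModel, neither call is valid any more.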
    private static final Logger LOGGER = new Logger();

    // Float model
    private final float IMAGE_MEAN = 0;

    private final float IMAGE_STD = 255.0f;

    // config yolo
    private int INPUT_SIZE = -1;

    private int output_box;

    // Number of threads in the java app
    private static final int NUM_THREADS = 1;

    private boolean isModelQuantized;

    /** holds a gpu delegate */
    // GpuDelegate gpuDelegate = null;

    /** holds an nnapi delegate */
    NnApiDelegate nnapiDelegate = null;

    /** The loaded TensorFlow Lite model. */
    private MappedByteBuffer tfliteModel;

    /** Options for configuring the Interpreter. */
    private final Interpreter.Options tfliteOptions = new Interpreter.Options();

    // Config values.

    // Pre-allocated buffers.
    private Vector<String> labels = new Vector<String>();
    private int[] intValues;

    private ByteBuffer imgData;

    private ByteBuffer outData;

    private Interpreter tfLite;
    private float inp_scale;
    private int inp_zero_point;
    private float oup_scale;
    private int oup_zero_point;
    private int numClass;

    private YoloV5Classifier() {
    }
    // non maximum suppression
    protected ArrayList<Recognition> nms(ArrayList<Recognition> list) {
        ArrayList<Recognition> nmsList = new ArrayList<Recognition>();

        for (int k = 0; k < labels.size(); k++) {
            // 1.find max confidence per class
            PriorityQueue<Recognition> pq = new PriorityQueue<Recognition>(50, new Comparator<Recognition>() {
                @Override
                public int compare(final Recognition lhs, final Recognition rhs) {
                    // Intentionally reversed to put high confidence at the head of the queue.
                    return Float.compare(rhs.getConfidence(), lhs.getConfidence());
                }
            });

            for (int i = 0; i < list.size(); ++i) {
                if (list.get(i).getDetectedClass() == k) {
                    pq.add(list.get(i));
                }
            }

            // 2.do non maximum suppression
            while (pq.size() > 0) {
                // insert detection with max confidence
                Recognition[] a = new Recognition[pq.size()];
                Recognition[] detections = pq.toArray(a);
                Recognition max = detections[0];
                nmsList.add(max);
                pq.clear();

                for (int j = 1; j < detections.length; j++) {
                    Recognition detection = detections[j];
                    RectF b = detection.getLocation();
                    if (box_iou(max.getLocation(), b) < mNmsThresh) {
                        pq.add(detection);
                    }
                }
            }
        }
        return nmsList;
    }
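    // Illustrative walk-through (comment only, with assumed numbers): nms()
    // runs per class with mNmsThresh = 0.6. For boxes a = (0,0,10,10) and
    // b = (5,0,15,10) the intersection is 5 * 10 = 50 and the union is
    // 100 + 100 - 50 = 150, so box_iou = 1/3; b survives a's suppression
    // round and is emitted in a later pass. A third box with IoU > 0.6
    // against the current maximum would instead be dropped from the queue.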
    protected float mNmsThresh = 0.6f;

    protected float box_iou(RectF a, RectF b) {
        return box_intersection(a, b) / box_union(a, b);
    }

    protected float box_intersection(RectF a, RectF b) {
        float w = overlap((a.left + a.right) / 2, a.right - a.left, (b.left + b.right) / 2, b.right - b.left);
        float h = overlap((a.top + a.bottom) / 2, a.bottom - a.top, (b.top + b.bottom) / 2, b.bottom - b.top);
        if (w < 0 || h < 0)
            return 0;
        float area = w * h;
        return area;
    }

    protected float box_union(RectF a, RectF b) {
        float i = box_intersection(a, b);
        float u = (a.right - a.left) * (a.bottom - a.top) + (b.right - b.left) * (b.bottom - b.top) - i;
        return u;
    }

    protected float overlap(float x1, float w1, float x2, float w2) {
        float l1 = x1 - w1 / 2;
        float l2 = x2 - w2 / 2;
        float left = l1 > l2 ? l1 : l2;
        float r1 = x1 + w1 / 2;
        float r2 = x2 + w2 / 2;
        float right = r1 < r2 ? r1 : r2;
        return right - left;
    }

    protected static final int BATCH_SIZE = 1;
    protected static final int PIXEL_SIZE = 3;
    /**
     * Writes Image data into a {@code ByteBuffer}.
     */
    protected ByteBuffer convertBitmapToByteBuffer(Bitmap bitmap) {
        bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());

        imgData.rewind();
        for (int i = 0; i < INPUT_SIZE; ++i) {
            for (int j = 0; j < INPUT_SIZE; ++j) {
                int pixelValue = intValues[i * INPUT_SIZE + j];
                if (isModelQuantized) {
                    // Quantized model
                    imgData.put((byte) ((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale
                            + inp_zero_point));
                    imgData.put((byte) ((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale
                            + inp_zero_point));
                    imgData.put((byte) (((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale + inp_zero_point));
                } else { // Float model
                    imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
                    imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
                    imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
                }
            }
        }
        return imgData;
    }
    public ArrayList<Recognition> recognizeImage(Bitmap bitmap) {
        convertBitmapToByteBuffer(bitmap);

        Map<Integer, Object> outputMap = new HashMap<Integer, Object>();
        outData.rewind();
        outputMap.put(0, outData);
        Log.d("YoloV5Classifier", "mObjThresh: " + getObjThresh());

        Object[] inputArray = { imgData };
        tfLite.runForMultipleInputsOutputs(inputArray, outputMap);

        ByteBuffer byteBuffer = (ByteBuffer) outputMap.get(0);
        byteBuffer.rewind();

        ArrayList<Recognition> detections = new ArrayList<Recognition>();

        float[][][] out = new float[1][output_box][numClass + 5];
        Log.d("YoloV5Classifier", "out[0] detect start");
        for (int i = 0; i < output_box; ++i) {
            for (int j = 0; j < numClass + 5; ++j) {
                if (isModelQuantized) {
                    out[0][i][j] = oup_scale * (((int) byteBuffer.get() & 0xFF) - oup_zero_point);
                } else {
                    out[0][i][j] = byteBuffer.getFloat();
                }
            }
            // Denormalize xywh
            for (int j = 0; j < 4; ++j) {
                out[0][i][j] *= getInputSize();
            }
        }
        for (int i = 0; i < output_box; ++i) {
            final int offset = 0;
            final float confidence = out[0][i][4];
            int detectedClass = -1;
            float maxClass = 0;

            final float[] classes = new float[labels.size()];
            for (int c = 0; c < labels.size(); ++c) {
                classes[c] = out[0][i][5 + c];
            }

            for (int c = 0; c < labels.size(); ++c) {
                if (classes[c] > maxClass) {
                    detectedClass = c;
                    maxClass = classes[c];
                }
            }

            final float confidenceInClass = maxClass * confidence;
            if (confidenceInClass > getObjThresh()) {
                final float xPos = out[0][i][0];
                final float yPos = out[0][i][1];

                final float w = out[0][i][2];
                final float h = out[0][i][3];
                Log.d("YoloV5Classifier", Float.toString(xPos) + ',' + yPos + ',' + w + ',' + h);

                final RectF rect = new RectF(Math.max(0, xPos - w / 2), Math.max(0, yPos - h / 2),
                        Math.min(bitmap.getWidth() - 1, xPos + w / 2), Math.min(bitmap.getHeight() - 1, yPos + h / 2));
                detections.add(new Recognition("" + offset, labels.get(detectedClass), confidenceInClass, rect,
                        detectedClass));
            }
        }

        Log.d(TAG, "detect end");
        final ArrayList<Recognition> recognitions = nms(detections);
        return recognitions;
    }
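    // Illustrative notes (comment only, values assumed): with IMAGE_MEAN = 0,
    // IMAGE_STD = 255 and an input scale of exactly 1/255 with zero point 0,
    // a channel value of 128 quantizes to (128 / 255) / (1 / 255) + 0 = 128,
    // i.e. the int8 input reproduces the raw byte. A typical call sequence is:
    //   Bitmap crop = Bitmap.createScaledBitmap(frame, detector.getInputSize(),
    //           detector.getInputSize(), false);
    //   List<Recognition> results = detector.recognizeImage(crop);
    // Boxes come back in input-image pixels, already filtered by
    // getObjThresh() and class-wise NMS.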
    public boolean checkInvalidateBox(float x, float y, float width, float height, float oriW, float oriH,
            int intputSize) {
        // (1) (x, y, w, h) --> (xmin, ymin, xmax, ymax)
        float halfHeight = height / 2.0f;
        float halfWidth = width / 2.0f;

        float[] pred_coor = new float[] { x - halfWidth, y - halfHeight, x + halfWidth, y + halfHeight };

        // (2) (xmin, ymin, xmax, ymax) -> (xmin_org, ymin_org, xmax_org, ymax_org)
        float resize_ratioW = 1.0f * intputSize / oriW;
        float resize_ratioH = 1.0f * intputSize / oriH;

        float resize_ratio = resize_ratioW > resize_ratioH ? resize_ratioH : resize_ratioW; // min

        float dw = (intputSize - resize_ratio * oriW) / 2;
        float dh = (intputSize - resize_ratio * oriH) / 2;

        pred_coor[0] = 1.0f * (pred_coor[0] - dw) / resize_ratio;
        pred_coor[2] = 1.0f * (pred_coor[2] - dw) / resize_ratio;

        pred_coor[1] = 1.0f * (pred_coor[1] - dh) / resize_ratio;
        pred_coor[3] = 1.0f * (pred_coor[3] - dh) / resize_ratio;

        // (3) clip boxes that are out of range
        pred_coor[0] = pred_coor[0] > 0 ? pred_coor[0] : 0;
        pred_coor[1] = pred_coor[1] > 0 ? pred_coor[1] : 0;
        pred_coor[2] = pred_coor[2] < (oriW - 1) ? pred_coor[2] : (oriW - 1);
        pred_coor[3] = pred_coor[3] < (oriH - 1) ? pred_coor[3] : (oriH - 1);

        if ((pred_coor[0] > pred_coor[2]) || (pred_coor[1] > pred_coor[3])) {
            pred_coor[0] = 0;
            pred_coor[1] = 0;
            pred_coor[2] = 0;
            pred_coor[3] = 0;
        }

        // (4) discard invalid boxes
        float temp1 = pred_coor[2] - pred_coor[0];
        float temp2 = pred_coor[3] - pred_coor[1];
        float temp = temp1 * temp2;
        if (temp < 0) {
            Log.e("checkInvalidateBox", "temp < 0");
            return false;
        }
        if (Math.sqrt(temp) > Float.MAX_VALUE) {
            Log.e("checkInvalidateBox", "temp max");
            return false;
        }

        return true;
    }
}
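A worked instance of the letterbox arithmetic in checkInvalidateBox (illustrative, with assumed numbers): for a 1280x720 frame and intputSize = 640, resize_ratio = min(640/1280, 640/720) = 0.5, so dw = (640 - 0.5 * 1280) / 2 = 0 and dh = (640 - 0.5 * 720) / 2 = 140; a network x-range of [100, 300] then maps back to [(100 - 0) / 0.5, (300 - 0) / 0.5] = [200, 600] in original frame pixels, after which steps (3) and (4) clip and validate the box.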
...@@ -35,178 +35,149 @@ import com.agenew.detection.env.ImageUtils;

import com.agenew.detection.env.Logger;
import com.agenew.detection.tflite.Classifier.Recognition;

/**
 * A tracker that handles non-max suppression and matches existing objects to
 * new detections.
 */
public class MultiBoxTracker {
    private static final float TEXT_SIZE_DIP = 18;
    private static final float MIN_SIZE = 16.0f;
    private static final int[] COLORS = { Color.BLUE, Color.RED, Color.GREEN, Color.YELLOW, Color.CYAN, Color.MAGENTA,
            Color.WHITE, Color.parseColor("#55FF55"), Color.parseColor("#FFA500"), Color.parseColor("#FF8888"),
            Color.parseColor("#AAAAFF"), Color.parseColor("#FFFFAA"), Color.parseColor("#55AAAA"),
            Color.parseColor("#AA33AA"), Color.parseColor("#0D0068") };
    final List<Pair<Float, RectF>> screenRects = new LinkedList<Pair<Float, RectF>>();
    private final Logger logger = new Logger();
    private final Queue<Integer> availableColors = new LinkedList<Integer>();
    private final List<TrackedRecognition> trackedObjects = new LinkedList<TrackedRecognition>();
    private final Paint boxPaint = new Paint();
    private final float textSizePx;
    private final BorderedText borderedText;
    private Matrix frameToCanvasMatrix;
    private int frameWidth;
    private int frameHeight;
    private int sensorOrientation;

    public MultiBoxTracker(final Context context) {
        for (final int color : COLORS) {
            availableColors.add(color);
        }

        boxPaint.setColor(Color.RED);
        boxPaint.setStyle(Style.STROKE);
        boxPaint.setStrokeWidth(10.0f);
        boxPaint.setStrokeCap(Cap.ROUND);
        boxPaint.setStrokeJoin(Join.ROUND);
        boxPaint.setStrokeMiter(100);

        textSizePx = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP,
                context.getResources().getDisplayMetrics());
        borderedText = new BorderedText(textSizePx);
    }

    public synchronized void setFrameConfiguration(final int width, final int height, final int sensorOrientation) {
        frameWidth = width;
        frameHeight = height;
        this.sensorOrientation = sensorOrientation;
    }
    public synchronized void drawDebug(final Canvas canvas) {
        final Paint textPaint = new Paint();
        textPaint.setColor(Color.WHITE);
        textPaint.setTextSize(60.0f);

        final Paint boxPaint = new Paint();
        boxPaint.setColor(Color.RED);
        boxPaint.setAlpha(200);
        boxPaint.setStyle(Style.STROKE);

        for (final Pair<Float, RectF> detection : screenRects) {
            final RectF rect = detection.second;
            canvas.drawRect(rect, boxPaint);
            canvas.drawText("" + detection.first, rect.left, rect.top, textPaint);
            borderedText.drawText(canvas, rect.centerX(), rect.centerY(), "" + detection.first);
        }
    }

    public synchronized void trackResults(final List<Recognition> results, final long timestamp) {
        logger.i("Processing %d results from %d", results.size(), timestamp);
        processResults(results);
    }

    private Matrix getFrameToCanvasMatrix() {
        return frameToCanvasMatrix;
    }
    public synchronized void draw(final Canvas canvas) {
        final boolean rotated = sensorOrientation % 180 == 90;
        final float multiplier = Math.min(canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
                canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth));
        frameToCanvasMatrix = ImageUtils.getTransformationMatrix(frameWidth, frameHeight,
                (int) (multiplier * (rotated ? frameHeight : frameWidth)),
                (int) (multiplier * (rotated ? frameWidth : frameHeight)), sensorOrientation, false);
        for (final TrackedRecognition recognition : trackedObjects) {
            final RectF trackedPos = new RectF(recognition.location);

            getFrameToCanvasMatrix().mapRect(trackedPos);
            boxPaint.setColor(recognition.color);

            float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f;
            canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint);

            final String labelString = !TextUtils.isEmpty(recognition.title)
                    ? String.format("%s %.2f", recognition.title, (100 * recognition.detectionConfidence))
                    : String.format("%.2f", (100 * recognition.detectionConfidence));
            borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint);
        }
    }
final List<Pair<Float, Recognition>> rectsToTrack = new LinkedList<Pair<Float, Recognition>>(); trackedObjects.clear();
if (rectsToTrack.isEmpty()) {
screenRects.clear(); logger.v("Nothing to track, aborting.");
final Matrix rgbFrameToScreen = new Matrix(getFrameToCanvasMatrix()); return;
}
for (final Recognition result : results) {
if (result.getLocation() == null) { for (final Pair<Float, Recognition> potential : rectsToTrack) {
continue; final TrackedRecognition trackedRecognition = new TrackedRecognition();
} trackedRecognition.detectionConfidence = potential.first;
final RectF detectionFrameRect = new RectF(result.getLocation()); trackedRecognition.location = new RectF(potential.second.getLocation());
trackedRecognition.title = potential.second.getTitle();
final RectF detectionScreenRect = new RectF(); trackedRecognition.color = COLORS[potential.second.getDetectedClass() % COLORS.length];
rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect); trackedObjects.add(trackedRecognition);
}
logger.v( }
"Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);
private static class TrackedRecognition {
screenRects.add(new Pair<Float, RectF>(result.getConfidence(), detectionScreenRect)); RectF location;
float detectionConfidence;
if (detectionFrameRect.width() < MIN_SIZE || detectionFrameRect.height() < MIN_SIZE) { int color;
logger.w("Degenerate rectangle! " + detectionFrameRect); String title;
continue; }
}
rectsToTrack.add(new Pair<Float, Recognition>(result.getConfidence(), result));
}
trackedObjects.clear();
if (rectsToTrack.isEmpty()) {
logger.v("Nothing to track, aborting.");
return;
}
for (final Pair<Float, Recognition> potential : rectsToTrack) {
final TrackedRecognition trackedRecognition = new TrackedRecognition();
trackedRecognition.detectionConfidence = potential.first;
trackedRecognition.location = new RectF(potential.second.getLocation());
trackedRecognition.title = potential.second.getTitle();
// trackedRecognition.color = COLORS[trackedObjects.size() % COLORS.length];
trackedRecognition.color = COLORS[potential.second.getDetectedClass() % COLORS.length];
trackedObjects.add(trackedRecognition);
// if (trackedObjects.size() >= COLORS.length) {
// break;
// }
}
}
private static class TrackedRecognition {
RectF location;
float detectionConfidence;
int color;
String title;
}
} }
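A pipeline sketch tying the tracker together (illustrative; the camera values are assumptions, not from the commit):

    MultiBoxTracker tracker = new MultiBoxTracker(this);
    tracker.setFrameConfiguration(640, 480, 90);   // preview size + sensor rotation
    tracker.trackResults(results, timestamp);      // results from the detector
    tracker.draw(canvas);                          // called from the overlay's onDraw

In draw(), a 640x480 frame rotated 90 degrees on a 1080x1920 canvas gives multiplier = min(1920/640, 1080/480) = 2.25, so boxes are scaled uniformly before being mapped by the frame-to-canvas matrix.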
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillType="evenOdd"
android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
android:strokeColor="#00000000"
android:strokeWidth="1">
<aapt:attr name="android:fillColor">
<gradient
android:endX="78.5885"
android:endY="90.9159"
android:startX="48.7653"
android:startY="61.0927"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0"/>
<item
android:color="#00000000"
android:offset="1.0"/>
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
android:strokeColor="#00000000"
android:strokeWidth="1"/>
</vector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillColor="#26A69A"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
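    <!-- Adaptive icon (API 26+): the launcher masks these two layers into its
         own shape; here a plain white background sits under the drawn
         foreground. -->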
<background android:drawable="@android:color/white"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@android:color/white"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<resources>
<!-- Base application theme. -->
<style name="AppTheme.ObjectDetection" parent="Theme.AppCompat.Light.NoActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/tfe_color_primary</item>
<item name="colorPrimaryDark">@color/tfe_color_primary_dark</item>
<item name="colorAccent">@color/tfe_color_accent</item>
</style>
</resources>
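The theme above references three color resources that are not shown in this
diff. A minimal sketch of the matching res/values/colors.xml, assuming the
stock TensorFlow Lite example palette (the hex values below are illustrative
placeholders, not taken from this commit):

<resources>
    <!-- Hypothetical values; replace with the project's actual palette. -->
    <color name="tfe_color_primary">#FFA800</color>
    <color name="tfe_color_primary_dark">#FF6F00</color>
    <color name="tfe_color_accent">#425066</color>
</resources>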