Commit 2756155a by wanglei

format

1 parent e08d779b
Showing with 284 additions and 1114 deletions
...@@ -14,11 +14,9 @@ ...@@ -14,11 +14,9 @@
<uses-permission android:name="android.permission.INTERNET"/> <uses-permission android:name="android.permission.INTERNET"/>
<application <application
android:allowBackup="true" android:allowBackup="true"
android:icon="@mipmap/ic_launcher" android:icon="@drawable/ic_launcher"
android:label="@string/tfe_od_app_name" android:label="@string/tfe_od_app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true" android:supportsRtl="true"
android:theme="@style/AppTheme.ObjectDetection"
android:hardwareAccelerated="true" android:hardwareAccelerated="true"
android:installLocation="internalOnly"> android:installLocation="internalOnly">
......
...@@ -17,6 +17,7 @@ ...@@ -17,6 +17,7 @@
package com.agenew.detection; package com.agenew.detection;
import android.Manifest; import android.Manifest;
import android.app.Activity;
import android.app.Fragment; import android.app.Fragment;
import android.content.Context; import android.content.Context;
import android.content.pm.PackageManager; import android.content.pm.PackageManager;
...@@ -36,17 +37,12 @@ import android.os.Handler; ...@@ -36,17 +37,12 @@ import android.os.Handler;
import android.os.HandlerThread; import android.os.HandlerThread;
import android.os.Trace; import android.os.Trace;
import androidx.annotation.NonNull; import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
//import androidx.appcompat.widget.Toolbar;
import android.util.Size; import android.util.Size;
import android.view.Surface; import android.view.Surface;
import android.view.View; import android.view.View;
import android.view.ViewTreeObserver; import android.view.ViewTreeObserver;
import android.view.WindowManager; import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter; import android.widget.ArrayAdapter;
import android.widget.CompoundButton;
import android.widget.ImageView; import android.widget.ImageView;
import android.widget.LinearLayout; import android.widget.LinearLayout;
import android.widget.ListView; import android.widget.ListView;
...@@ -61,11 +57,8 @@ import java.util.ArrayList; ...@@ -61,11 +57,8 @@ import java.util.ArrayList;
import com.agenew.detection.env.ImageUtils; import com.agenew.detection.env.ImageUtils;
import com.agenew.detection.env.Logger; import com.agenew.detection.env.Logger;
public abstract class CameraActivity extends AppCompatActivity public abstract class CameraActivity extends Activity
implements OnImageAvailableListener, implements OnImageAvailableListener, Camera.PreviewCallback, View.OnClickListener {
Camera.PreviewCallback,
// CompoundButton.OnCheckedChangeListener,
View.OnClickListener {
private static final Logger LOGGER = new Logger(); private static final Logger LOGGER = new Logger();
private static final int PERMISSIONS_REQUEST = 1; private static final int PERMISSIONS_REQUEST = 1;
...@@ -86,7 +79,7 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -86,7 +79,7 @@ public abstract class CameraActivity extends AppCompatActivity
protected int defaultDeviceIndex = 2; protected int defaultDeviceIndex = 2;
private Runnable postInferenceCallback; private Runnable postInferenceCallback;
private Runnable imageConverter; private Runnable imageConverter;
protected ArrayList<String> modelStrings = new ArrayList<String>(); protected ArrayList<String> modelStrings = new ArrayList<>();
private LinearLayout bottomSheetLayout; private LinearLayout bottomSheetLayout;
private LinearLayout gestureLayout; private LinearLayout gestureLayout;
...@@ -103,7 +96,7 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -103,7 +96,7 @@ public abstract class CameraActivity extends AppCompatActivity
int currentModel = -1; int currentModel = -1;
int currentNumThreads = -1; int currentNumThreads = -1;
ArrayList<String> deviceStrings = new ArrayList<String>(); ArrayList<String> deviceStrings = new ArrayList<>();
@Override @Override
protected void onCreate(final Bundle savedInstanceState) { protected void onCreate(final Bundle savedInstanceState) {
...@@ -112,9 +105,6 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -112,9 +105,6 @@ public abstract class CameraActivity extends AppCompatActivity
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tfe_od_activity_camera); setContentView(R.layout.tfe_od_activity_camera);
// Toolbar toolbar = findViewById(R.id.toolbar);
// setSupportActionBar(toolbar);
// getSupportActionBar().setDisplayShowTitleEnabled(false);
if (hasPermission()) { if (hasPermission()) {
setFragment(); setFragment();
...@@ -131,19 +121,12 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -131,19 +121,12 @@ public abstract class CameraActivity extends AppCompatActivity
deviceStrings.add("GPU"); deviceStrings.add("GPU");
deviceStrings.add("NNAPI"); deviceStrings.add("NNAPI");
deviceView.setChoiceMode(ListView.CHOICE_MODE_SINGLE); deviceView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
ArrayAdapter<String> deviceAdapter = ArrayAdapter<String> deviceAdapter = new ArrayAdapter<>(this, R.layout.deviceview_row,
new ArrayAdapter<>( R.id.deviceview_row_text, deviceStrings);
CameraActivity.this , R.layout.deviceview_row, R.id.deviceview_row_text, deviceStrings);
deviceView.setAdapter(deviceAdapter); deviceView.setAdapter(deviceAdapter);
deviceView.setItemChecked(defaultDeviceIndex, true); deviceView.setItemChecked(defaultDeviceIndex, true);
currentDevice = defaultDeviceIndex; currentDevice = defaultDeviceIndex;
deviceView.setOnItemClickListener( deviceView.setOnItemClickListener((parent, view, position, id) -> updateActiveModel());
new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
updateActiveModel();
}
});
bottomSheetLayout = findViewById(R.id.bottom_sheet_layout); bottomSheetLayout = findViewById(R.id.bottom_sheet_layout);
gestureLayout = findViewById(R.id.gesture_layout); gestureLayout = findViewById(R.id.gesture_layout);
...@@ -153,23 +136,15 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -153,23 +136,15 @@ public abstract class CameraActivity extends AppCompatActivity
modelStrings = getModelStrings(getAssets(), ASSET_PATH); modelStrings = getModelStrings(getAssets(), ASSET_PATH);
modelView.setChoiceMode(ListView.CHOICE_MODE_SINGLE); modelView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
ArrayAdapter<String> modelAdapter = ArrayAdapter<String> modelAdapter = new ArrayAdapter<>(this, R.layout.listview_row,
new ArrayAdapter<>( R.id.listview_row_text, modelStrings);
CameraActivity.this , R.layout.listview_row, R.id.listview_row_text, modelStrings);
modelView.setAdapter(modelAdapter); modelView.setAdapter(modelAdapter);
modelView.setItemChecked(defaultModelIndex, true); modelView.setItemChecked(defaultModelIndex, true);
currentModel = defaultModelIndex; currentModel = defaultModelIndex;
modelView.setOnItemClickListener( modelView.setOnItemClickListener((parent, view, position, id) -> updateActiveModel());
new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
updateActiveModel();
}
});
ViewTreeObserver vto = gestureLayout.getViewTreeObserver(); ViewTreeObserver vto = gestureLayout.getViewTreeObserver();
vto.addOnGlobalLayoutListener( vto.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
new ViewTreeObserver.OnGlobalLayoutListener() {
@Override @Override
public void onGlobalLayout() { public void onGlobalLayout() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) { if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
...@@ -177,7 +152,6 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -177,7 +152,6 @@ public abstract class CameraActivity extends AppCompatActivity
} else { } else {
gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this); gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this);
} }
// int width = bottomSheetLayout.getMeasuredWidth();
int height = gestureLayout.getMeasuredHeight(); int height = gestureLayout.getMeasuredHeight();
sheetBehavior.setPeekHeight(height); sheetBehavior.setPeekHeight(height);
...@@ -185,20 +159,17 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -185,20 +159,17 @@ public abstract class CameraActivity extends AppCompatActivity
}); });
sheetBehavior.setHideable(false); sheetBehavior.setHideable(false);
sheetBehavior.setBottomSheetCallback( sheetBehavior.setBottomSheetCallback(new BottomSheetBehavior.BottomSheetCallback() {
new BottomSheetBehavior.BottomSheetCallback() {
@Override @Override
public void onStateChanged(@NonNull View bottomSheet, int newState) { public void onStateChanged(@NonNull View bottomSheet, int newState) {
switch (newState) { switch (newState) {
case BottomSheetBehavior.STATE_HIDDEN: case BottomSheetBehavior.STATE_HIDDEN:
break; break;
case BottomSheetBehavior.STATE_EXPANDED: case BottomSheetBehavior.STATE_EXPANDED: {
{
bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_down); bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_down);
} }
break; break;
case BottomSheetBehavior.STATE_COLLAPSED: case BottomSheetBehavior.STATE_COLLAPSED: {
{
bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up); bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
} }
break; break;
...@@ -211,7 +182,8 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -211,7 +182,8 @@ public abstract class CameraActivity extends AppCompatActivity
} }
@Override @Override
public void onSlide(@NonNull View bottomSheet, float slideOffset) {} public void onSlide(@NonNull View bottomSheet, float slideOffset) {
}
}); });
frameValueTextView = findViewById(R.id.frame_info); frameValueTextView = findViewById(R.id.frame_info);
...@@ -222,10 +194,8 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -222,10 +194,8 @@ public abstract class CameraActivity extends AppCompatActivity
minusImageView.setOnClickListener(this); minusImageView.setOnClickListener(this);
} }
protected ArrayList<String> getModelStrings(AssetManager mgr, String path) {
ArrayList<String> res = new ArrayList<>();
protected ArrayList<String> getModelStrings(AssetManager mgr, String path){
ArrayList<String> res = new ArrayList<String>();
try { try {
String[] files = mgr.list(path); String[] files = mgr.list(path);
for (String file : files) { for (String file : files) {
...@@ -235,8 +205,7 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -235,8 +205,7 @@ public abstract class CameraActivity extends AppCompatActivity
} }
} }
} } catch (IOException e) {
catch (IOException e){
System.err.println("getModelStrings: " + e.getMessage()); System.err.println("getModelStrings: " + e.getMessage());
} }
return res; return res;
...@@ -247,14 +216,6 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -247,14 +216,6 @@ public abstract class CameraActivity extends AppCompatActivity
return rgbBytes; return rgbBytes;
} }
protected int getLuminanceStride() {
return yRowStride;
}
protected byte[] getLuminance() {
return yuvBytes[0];
}
/** Callback for android.hardware.Camera API */ /** Callback for android.hardware.Camera API */
@Override @Override
public void onPreviewFrame(final byte[] bytes, final Camera camera) { public void onPreviewFrame(final byte[] bytes, final Camera camera) {
...@@ -281,16 +242,9 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -281,16 +242,9 @@ public abstract class CameraActivity extends AppCompatActivity
yuvBytes[0] = bytes; yuvBytes[0] = bytes;
yRowStride = previewWidth; yRowStride = previewWidth;
imageConverter = imageConverter = () -> ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
new Runnable() {
@Override
public void run() {
ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
}
};
postInferenceCallback = postInferenceCallback = new Runnable() {
new Runnable() {
@Override @Override
public void run() { public void run() {
camera.addCallbackBuffer(bytes); camera.addCallbackBuffer(bytes);
...@@ -329,25 +283,10 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -329,25 +283,10 @@ public abstract class CameraActivity extends AppCompatActivity
final int uvRowStride = planes[1].getRowStride(); final int uvRowStride = planes[1].getRowStride();
final int uvPixelStride = planes[1].getPixelStride(); final int uvPixelStride = planes[1].getPixelStride();
imageConverter = imageConverter = () -> ImageUtils.convertYUV420ToARGB8888(yuvBytes[0], yuvBytes[1], yuvBytes[2], previewWidth,
new Runnable() { previewHeight, yRowStride, uvRowStride, uvPixelStride, rgbBytes);
@Override
public void run() {
ImageUtils.convertYUV420ToARGB8888(
yuvBytes[0],
yuvBytes[1],
yuvBytes[2],
previewWidth,
previewHeight,
yRowStride,
uvRowStride,
uvPixelStride,
rgbBytes);
}
};
postInferenceCallback = postInferenceCallback = new Runnable() {
new Runnable() {
@Override @Override
public void run() { public void run() {
image.close(); image.close();
...@@ -415,8 +354,8 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -415,8 +354,8 @@ public abstract class CameraActivity extends AppCompatActivity
} }
@Override @Override
public void onRequestPermissionsResult( public void onRequestPermissionsResult(final int requestCode, final String[] permissions,
final int requestCode, final String[] permissions, final int[] grantResults) { final int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults); super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == PERMISSIONS_REQUEST) { if (requestCode == PERMISSIONS_REQUEST) {
if (allPermissionsGranted(grantResults)) { if (allPermissionsGranted(grantResults)) {
...@@ -447,19 +386,15 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -447,19 +386,15 @@ public abstract class CameraActivity extends AppCompatActivity
private void requestPermission() { private void requestPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) { if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) {
Toast.makeText( Toast.makeText(CameraActivity.this, "Camera permission is required for this demo", Toast.LENGTH_LONG)
CameraActivity.this,
"Camera permission is required for this demo",
Toast.LENGTH_LONG)
.show(); .show();
} }
requestPermissions(new String[] {PERMISSION_CAMERA}, PERMISSIONS_REQUEST); requestPermissions(new String[] { PERMISSION_CAMERA }, PERMISSIONS_REQUEST);
} }
} }
// Returns true if the device supports the required hardware level, or better. // Returns true if the device supports the required hardware level, or better.
private boolean isHardwareLevelSupported( private boolean isHardwareLevelSupported(CameraCharacteristics characteristics, int requiredLevel) {
CameraCharacteristics characteristics, int requiredLevel) {
int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) { if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
return requiredLevel == deviceLevel; return requiredLevel == deviceLevel;
...@@ -480,8 +415,8 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -480,8 +415,8 @@ public abstract class CameraActivity extends AppCompatActivity
continue; continue;
} }
final StreamConfigurationMap map = final StreamConfigurationMap map = characteristics
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) { if (map == null) {
continue; continue;
...@@ -490,9 +425,7 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -490,9 +425,7 @@ public abstract class CameraActivity extends AppCompatActivity
// Fallback to camera1 API for internal cameras that don't have full support. // Fallback to camera1 API for internal cameras that don't have full support.
// This should help with legacy situations where using the camera2 API causes // This should help with legacy situations where using the camera2 API causes
// distorted or otherwise broken previews. // distorted or otherwise broken previews.
useCamera2API = useCamera2API = (facing == CameraCharacteristics.LENS_FACING_EXTERNAL) || isHardwareLevelSupported(
(facing == CameraCharacteristics.LENS_FACING_EXTERNAL)
|| isHardwareLevelSupported(
characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL); characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
LOGGER.i("Camera API lv2?: %s", useCamera2API); LOGGER.i("Camera API lv2?: %s", useCamera2API);
return cameraId; return cameraId;
...@@ -509,25 +442,17 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -509,25 +442,17 @@ public abstract class CameraActivity extends AppCompatActivity
Fragment fragment; Fragment fragment;
if (useCamera2API) { if (useCamera2API) {
CameraConnectionFragment camera2Fragment = CameraConnectionFragment camera2Fragment = CameraConnectionFragment
CameraConnectionFragment.newInstance( .newInstance((size, rotation) -> {
new CameraConnectionFragment.ConnectionCallback() {
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
previewHeight = size.getHeight(); previewHeight = size.getHeight();
previewWidth = size.getWidth(); previewWidth = size.getWidth();
CameraActivity.this.onPreviewSizeChosen(size, rotation); CameraActivity.this.onPreviewSizeChosen(size, rotation);
} }, this, getLayoutId(), getDesiredPreviewFrameSize());
},
this,
getLayoutId(),
getDesiredPreviewFrameSize());
camera2Fragment.setCamera(cameraId); camera2Fragment.setCamera(cameraId);
fragment = camera2Fragment; fragment = camera2Fragment;
} else { } else {
fragment = fragment = new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
} }
getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit(); getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit();
...@@ -569,19 +494,13 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -569,19 +494,13 @@ public abstract class CameraActivity extends AppCompatActivity
} }
} }
// @Override
// public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
// setUseNNAPI(isChecked);
// if (isChecked) apiSwitchCompat.setText("NNAPI");
// else apiSwitchCompat.setText("TFLITE");
// }
@Override @Override
public void onClick(View v) { public void onClick(View v) {
if (v.getId() == R.id.plus) { if (v.getId() == R.id.plus) {
String threads = threadsTextView.getText().toString().trim(); String threads = threadsTextView.getText().toString().trim();
int numThreads = Integer.parseInt(threads); int numThreads = Integer.parseInt(threads);
if (numThreads >= 9) return; if (numThreads >= 9)
return;
numThreads++; numThreads++;
threadsTextView.setText(String.valueOf(numThreads)); threadsTextView.setText(String.valueOf(numThreads));
setNumThreads(numThreads); setNumThreads(numThreads);
...@@ -610,6 +529,7 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -610,6 +529,7 @@ public abstract class CameraActivity extends AppCompatActivity
} }
protected abstract void updateActiveModel(); protected abstract void updateActiveModel();
protected abstract void processImage(); protected abstract void processImage();
protected abstract void onPreviewSizeChosen(final Size size, final int rotation); protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
...@@ -619,6 +539,4 @@ public abstract class CameraActivity extends AppCompatActivity ...@@ -619,6 +539,4 @@ public abstract class CameraActivity extends AppCompatActivity
protected abstract Size getDesiredPreviewFrameSize(); protected abstract Size getDesiredPreviewFrameSize();
protected abstract void setNumThreads(int numThreads); protected abstract void setNumThreads(int numThreads);
protected abstract void setUseNNAPI(boolean isChecked);
} }
...@@ -23,7 +23,6 @@ import android.app.Dialog; ...@@ -23,7 +23,6 @@ import android.app.Dialog;
import android.app.DialogFragment; import android.app.DialogFragment;
import android.app.Fragment; import android.app.Fragment;
import android.content.Context; import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Configuration; import android.content.res.Configuration;
import android.graphics.ImageFormat; import android.graphics.ImageFormat;
import android.graphics.Matrix; import android.graphics.Matrix;
...@@ -67,8 +66,8 @@ public class CameraConnectionFragment extends Fragment { ...@@ -67,8 +66,8 @@ public class CameraConnectionFragment extends Fragment {
private static final Logger LOGGER = new Logger(); private static final Logger LOGGER = new Logger();
/** /**
* The camera preview size will be chosen to be the smallest frame by pixel size capable of * The camera preview size will be chosen to be the smallest frame by pixel size
* containing a DESIRED_SIZE x DESIRED_SIZE square. * capable of containing a DESIRED_SIZE x DESIRED_SIZE square.
*/ */
private static final int MINIMUM_PREVIEW_SIZE = 320; private static final int MINIMUM_PREVIEW_SIZE = 320;
...@@ -84,29 +83,34 @@ public class CameraConnectionFragment extends Fragment { ...@@ -84,29 +83,34 @@ public class CameraConnectionFragment extends Fragment {
ORIENTATIONS.append(Surface.ROTATION_270, 180); ORIENTATIONS.append(Surface.ROTATION_270, 180);
} }
/** A {@link Semaphore} to prevent the app from exiting before closing the camera. */ /**
* A {@link Semaphore} to prevent the app from exiting before closing the
* camera.
*/
private final Semaphore cameraOpenCloseLock = new Semaphore(1); private final Semaphore cameraOpenCloseLock = new Semaphore(1);
/** A {@link OnImageAvailableListener} to receive frames as they are available. */ /**
* A {@link OnImageAvailableListener} to receive frames as they are available.
*/
private final OnImageAvailableListener imageListener; private final OnImageAvailableListener imageListener;
/** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */ /**
* The input size in pixels desired by TensorFlow (width and height of a square
* bitmap).
*/
private final Size inputSize; private final Size inputSize;
/** The layout identifier to inflate for this Fragment. */ /** The layout identifier to inflate for this Fragment. */
private final int layout; private final int layout;
private final ConnectionCallback cameraConnectionCallback; private final ConnectionCallback cameraConnectionCallback;
private final CameraCaptureSession.CaptureCallback captureCallback = private final CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
new CameraCaptureSession.CaptureCallback() {
@Override @Override
public void onCaptureProgressed( public void onCaptureProgressed(final CameraCaptureSession session, final CaptureRequest request,
final CameraCaptureSession session, final CaptureResult partialResult) {
final CaptureRequest request, }
final CaptureResult partialResult) {}
@Override @Override
public void onCaptureCompleted( public void onCaptureCompleted(final CameraCaptureSession session, final CaptureRequest request,
final CameraCaptureSession session, final TotalCaptureResult result) {
final CaptureRequest request, }
final TotalCaptureResult result) {}
}; };
/** ID of the current {@link CameraDevice}. */ /** ID of the current {@link CameraDevice}. */
private String cameraId; private String cameraId;
...@@ -130,12 +134,15 @@ public class CameraConnectionFragment extends Fragment { ...@@ -130,12 +134,15 @@ public class CameraConnectionFragment extends Fragment {
private CaptureRequest.Builder previewRequestBuilder; private CaptureRequest.Builder previewRequestBuilder;
/** {@link CaptureRequest} generated by {@link #previewRequestBuilder} */ /** {@link CaptureRequest} generated by {@link #previewRequestBuilder} */
private CaptureRequest previewRequest; private CaptureRequest previewRequest;
/** {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state. */ /**
private final CameraDevice.StateCallback stateCallback = * {@link CameraDevice.StateCallback} is called when {@link CameraDevice}
new CameraDevice.StateCallback() { * changes its state.
*/
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override @Override
public void onOpened(final CameraDevice cd) { public void onOpened(final CameraDevice cd) {
// This method is called when the camera is opened. We start camera preview here. // This method is called when the camera is opened. We start camera preview
// here.
cameraOpenCloseLock.release(); cameraOpenCloseLock.release();
cameraDevice = cd; cameraDevice = cd;
createCameraPreviewSession(); createCameraPreviewSession();
...@@ -160,20 +167,17 @@ public class CameraConnectionFragment extends Fragment { ...@@ -160,20 +167,17 @@ public class CameraConnectionFragment extends Fragment {
} }
}; };
/** /**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link * {@link TextureView.SurfaceTextureListener} handles several lifecycle events
* TextureView}. * on a {@link TextureView}.
*/ */
private final TextureView.SurfaceTextureListener surfaceTextureListener = private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
new TextureView.SurfaceTextureListener() {
@Override @Override
public void onSurfaceTextureAvailable( public void onSurfaceTextureAvailable(final SurfaceTexture texture, final int width, final int height) {
final SurfaceTexture texture, final int width, final int height) {
openCamera(width, height); openCamera(width, height);
} }
@Override @Override
public void onSurfaceTextureSizeChanged( public void onSurfaceTextureSizeChanged(final SurfaceTexture texture, final int width, final int height) {
final SurfaceTexture texture, final int width, final int height) {
configureTransform(width, height); configureTransform(width, height);
} }
...@@ -183,14 +187,12 @@ public class CameraConnectionFragment extends Fragment { ...@@ -183,14 +187,12 @@ public class CameraConnectionFragment extends Fragment {
} }
@Override @Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {} public void onSurfaceTextureUpdated(final SurfaceTexture texture) {
}
}; };
private CameraConnectionFragment( private CameraConnectionFragment(final ConnectionCallback connectionCallback,
final ConnectionCallback connectionCallback, final OnImageAvailableListener imageListener, final int layout, final Size inputSize) {
final OnImageAvailableListener imageListener,
final int layout,
final Size inputSize) {
this.cameraConnectionCallback = connectionCallback; this.cameraConnectionCallback = connectionCallback;
this.imageListener = imageListener; this.imageListener = imageListener;
this.layout = layout; this.layout = layout;
...@@ -198,10 +200,12 @@ public class CameraConnectionFragment extends Fragment { ...@@ -198,10 +200,12 @@ public class CameraConnectionFragment extends Fragment {
} }
/** /**
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose * Given {@code choices} of {@code Size}s supported by a camera, chooses the
* width and height are at least as large as the minimum of both, or an exact match if possible. * smallest one whose width and height are at least as large as the minimum of
* both, or an exact match if possible.
* *
* @param choices The list of sizes that the camera supports for the intended output class * @param choices The list of sizes that the camera supports for the intended
* output class
* @param width The minimum desired width * @param width The minimum desired width
* @param height The minimum desired height * @param height The minimum desired height
* @return The optimal {@code Size}, or an arbitrary one if none were big enough * @return The optimal {@code Size}, or an arbitrary one if none were big enough
...@@ -210,13 +214,15 @@ public class CameraConnectionFragment extends Fragment { ...@@ -210,13 +214,15 @@ public class CameraConnectionFragment extends Fragment {
final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE); final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
final Size desiredSize = new Size(width, height); final Size desiredSize = new Size(width, height);
// Collect the supported resolutions that are at least as big as the preview Surface // Collect the supported resolutions that are at least as big as the preview
// Surface
boolean exactSizeFound = false; boolean exactSizeFound = false;
final List<Size> bigEnough = new ArrayList<Size>(); final List<Size> bigEnough = new ArrayList<Size>();
final List<Size> tooSmall = new ArrayList<Size>(); final List<Size> tooSmall = new ArrayList<Size>();
for (final Size option : choices) { for (final Size option : choices) {
if (option.equals(desiredSize)) { if (option.equals(desiredSize)) {
// Set the size but don't return yet so that remaining sizes will still be logged. // Set the size but don't return yet so that remaining sizes will still be
// logged.
exactSizeFound = true; exactSizeFound = true;
} }
...@@ -247,11 +253,8 @@ public class CameraConnectionFragment extends Fragment { ...@@ -247,11 +253,8 @@ public class CameraConnectionFragment extends Fragment {
} }
} }
public static CameraConnectionFragment newInstance( public static CameraConnectionFragment newInstance(final ConnectionCallback callback,
final ConnectionCallback callback, final OnImageAvailableListener imageListener, final int layout, final Size inputSize) {
final OnImageAvailableListener imageListener,
final int layout,
final Size inputSize) {
return new CameraConnectionFragment(callback, imageListener, layout, inputSize); return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
} }
...@@ -263,8 +266,7 @@ public class CameraConnectionFragment extends Fragment { ...@@ -263,8 +266,7 @@ public class CameraConnectionFragment extends Fragment {
private void showToast(final String text) { private void showToast(final String text) {
final Activity activity = getActivity(); final Activity activity = getActivity();
if (activity != null) { if (activity != null) {
activity.runOnUiThread( activity.runOnUiThread(new Runnable() {
new Runnable() {
@Override @Override
public void run() { public void run() {
Toast.makeText(activity, text, Toast.LENGTH_SHORT).show(); Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
...@@ -274,8 +276,8 @@ public class CameraConnectionFragment extends Fragment { ...@@ -274,8 +276,8 @@ public class CameraConnectionFragment extends Fragment {
} }
@Override @Override
public View onCreateView( public View onCreateView(final LayoutInflater inflater, final ViewGroup container,
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) { final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false); return inflater.inflate(layout, container, false);
} }
...@@ -294,9 +296,12 @@ public class CameraConnectionFragment extends Fragment { ...@@ -294,9 +296,12 @@ public class CameraConnectionFragment extends Fragment {
super.onResume(); super.onResume();
startBackgroundThread(); startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already // When the screen is turned off and turned back on, the SurfaceTexture is
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open // already
// a camera and start preview from here (otherwise, we wait until the surface is ready in // available, and "onSurfaceTextureAvailable" will not be called. In that case,
// we can open
// a camera and start preview from here (otherwise, we wait until the surface is
// ready in
// the SurfaceTextureListener). // the SurfaceTextureListener).
if (textureView.isAvailable()) { if (textureView.isAvailable()) {
openCamera(textureView.getWidth(), textureView.getHeight()); openCamera(textureView.getWidth(), textureView.getHeight());
...@@ -323,18 +328,16 @@ public class CameraConnectionFragment extends Fragment { ...@@ -323,18 +328,16 @@ public class CameraConnectionFragment extends Fragment {
try { try {
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId); final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
final StreamConfigurationMap map = final StreamConfigurationMap map = characteristics
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); .get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION); sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera // Danger, W.R.! Attempting to use too large a preview size could exceed the
// camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of // bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data. // garbage capture data.
previewSize = previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), inputSize.getWidth(),
chooseOptimalSize(
map.getOutputSizes(SurfaceTexture.class),
inputSize.getWidth(),
inputSize.getHeight()); inputSize.getHeight());
// We fit the aspect ratio of TextureView to the size of preview we picked. // We fit the aspect ratio of TextureView to the size of preview we picked.
...@@ -347,10 +350,11 @@ public class CameraConnectionFragment extends Fragment { ...@@ -347,10 +350,11 @@ public class CameraConnectionFragment extends Fragment {
} catch (final CameraAccessException e) { } catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!"); LOGGER.e(e, "Exception!");
} catch (final NullPointerException e) { } catch (final NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the // Currently an NPE is thrown when the Camera2API is used but not supported on
// the
// device this code runs. // device this code runs.
ErrorDialog.newInstance(getString(R.string.tfe_od_camera_error)) ErrorDialog.newInstance(getString(R.string.tfe_od_camera_error)).show(getChildFragmentManager(),
.show(getChildFragmentManager(), FRAGMENT_DIALOG); FRAGMENT_DIALOG);
throw new IllegalStateException(getString(R.string.tfe_od_camera_error)); throw new IllegalStateException(getString(R.string.tfe_od_camera_error));
} }
...@@ -423,7 +427,8 @@ public class CameraConnectionFragment extends Fragment { ...@@ -423,7 +427,8 @@ public class CameraConnectionFragment extends Fragment {
final SurfaceTexture texture = textureView.getSurfaceTexture(); final SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null; assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want. // We configure the size of default buffer to be the size of camera preview we
// want.
texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight()); texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
// This is the output Surface we need to start preview. // This is the output Surface we need to start preview.
...@@ -436,16 +441,14 @@ public class CameraConnectionFragment extends Fragment { ...@@ -436,16 +441,14 @@ public class CameraConnectionFragment extends Fragment {
LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight()); LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());
// Create the reader for the preview frames. // Create the reader for the preview frames.
previewReader = previewReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(),
ImageReader.newInstance( ImageFormat.YUV_420_888, 2);
previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
previewReader.setOnImageAvailableListener(imageListener, backgroundHandler); previewReader.setOnImageAvailableListener(imageListener, backgroundHandler);
previewRequestBuilder.addTarget(previewReader.getSurface()); previewRequestBuilder.addTarget(previewReader.getSurface());
// Here, we create a CameraCaptureSession for camera preview. // Here, we create a CameraCaptureSession for camera preview.
cameraDevice.createCaptureSession( cameraDevice.createCaptureSession(Arrays.asList(surface, previewReader.getSurface()),
Arrays.asList(surface, previewReader.getSurface()),
new CameraCaptureSession.StateCallback() { new CameraCaptureSession.StateCallback() {
@Override @Override
...@@ -459,17 +462,15 @@ public class CameraConnectionFragment extends Fragment { ...@@ -459,17 +462,15 @@ public class CameraConnectionFragment extends Fragment {
captureSession = cameraCaptureSession; captureSession = cameraCaptureSession;
try { try {
// Auto focus should be continuous for camera preview. // Auto focus should be continuous for camera preview.
previewRequestBuilder.set( previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE); CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// Flash is automatically enabled when necessary. // Flash is automatically enabled when necessary.
previewRequestBuilder.set( previewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH); CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
// Finally, we start displaying the camera preview. // Finally, we start displaying the camera preview.
previewRequest = previewRequestBuilder.build(); previewRequest = previewRequestBuilder.build();
captureSession.setRepeatingRequest( captureSession.setRepeatingRequest(previewRequest, captureCallback, backgroundHandler);
previewRequest, captureCallback, backgroundHandler);
} catch (final CameraAccessException e) { } catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!"); LOGGER.e(e, "Exception!");
} }
...@@ -479,17 +480,16 @@ public class CameraConnectionFragment extends Fragment { ...@@ -479,17 +480,16 @@ public class CameraConnectionFragment extends Fragment {
public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) { public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
showToast("Failed"); showToast("Failed");
} }
}, }, null);
null);
} catch (final CameraAccessException e) { } catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!"); LOGGER.e(e, "Exception!");
} }
} }
/** /**
* Configures the necessary {@link Matrix} transformation to `mTextureView`. This method should be * Configures the necessary {@link Matrix} transformation to `mTextureView`.
* called after the camera preview size is determined in setUpCameraOutputs and also the size of * This method should be called after the camera preview size is determined in
* `mTextureView` is fixed. * setUpCameraOutputs and also the size of `mTextureView` is fixed.
* *
* @param viewWidth The width of `mTextureView` * @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView` * @param viewHeight The height of `mTextureView`
...@@ -508,9 +508,7 @@ public class CameraConnectionFragment extends Fragment { ...@@ -508,9 +508,7 @@ public class CameraConnectionFragment extends Fragment {
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) { if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY()); bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL); matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
final float scale = final float scale = Math.max((float) viewHeight / previewSize.getHeight(),
Math.max(
(float) viewHeight / previewSize.getHeight(),
(float) viewWidth / previewSize.getWidth()); (float) viewWidth / previewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY); matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY); matrix.postRotate(90 * (rotation - 2), centerX, centerY);
...@@ -521,8 +519,8 @@ public class CameraConnectionFragment extends Fragment { ...@@ -521,8 +519,8 @@ public class CameraConnectionFragment extends Fragment {
} }
/** /**
* Callback for Activities to use to initialize their data once the selected preview size is * Callback for Activities to use to initialize their data once the selected
* known. * preview size is known.
*/ */
public interface ConnectionCallback { public interface ConnectionCallback {
void onPreviewSizeChosen(Size size, int cameraRotation); void onPreviewSizeChosen(Size size, int cameraRotation);
...@@ -533,8 +531,7 @@ public class CameraConnectionFragment extends Fragment { ...@@ -533,8 +531,7 @@ public class CameraConnectionFragment extends Fragment {
@Override @Override
public int compare(final Size lhs, final Size rhs) { public int compare(final Size lhs, final Size rhs) {
// We cast here to ensure the multiplications won't overflow // We cast here to ensure the multiplications won't overflow
return Long.signum( return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
(long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
} }
} }
...@@ -553,17 +550,8 @@ public class CameraConnectionFragment extends Fragment { ...@@ -553,17 +550,8 @@ public class CameraConnectionFragment extends Fragment {
@Override @Override
public Dialog onCreateDialog(final Bundle savedInstanceState) { public Dialog onCreateDialog(final Bundle savedInstanceState) {
final Activity activity = getActivity(); final Activity activity = getActivity();
return new AlertDialog.Builder(activity) return new AlertDialog.Builder(activity).setMessage(getArguments().getString(ARG_MESSAGE))
.setMessage(getArguments().getString(ARG_MESSAGE)) .setPositiveButton(android.R.string.ok, (dialogInterface, i) -> activity.finish()).create();
.setPositiveButton(
android.R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialogInterface, final int i) {
activity.finish();
}
})
.create();
} }
} }
} }
...@@ -56,14 +56,12 @@ public class LegacyCameraConnectionFragment extends Fragment { ...@@ -56,14 +56,12 @@ public class LegacyCameraConnectionFragment extends Fragment {
/** An {@link AutoFitTextureView} for camera preview. */ /** An {@link AutoFitTextureView} for camera preview. */
private AutoFitTextureView textureView; private AutoFitTextureView textureView;
/** /**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link * {@link TextureView.SurfaceTextureListener} handles several lifecycle events
* TextureView}. * on a {@link TextureView}.
*/ */
private final TextureView.SurfaceTextureListener surfaceTextureListener = private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
new TextureView.SurfaceTextureListener() {
@Override @Override
public void onSurfaceTextureAvailable( public void onSurfaceTextureAvailable(final SurfaceTexture texture, final int width, final int height) {
final SurfaceTexture texture, final int width, final int height) {
int index = getCameraId(); int index = getCameraId();
camera = Camera.open(index); camera = Camera.open(index);
...@@ -71,8 +69,7 @@ public class LegacyCameraConnectionFragment extends Fragment { ...@@ -71,8 +69,7 @@ public class LegacyCameraConnectionFragment extends Fragment {
try { try {
Camera.Parameters parameters = camera.getParameters(); Camera.Parameters parameters = camera.getParameters();
List<String> focusModes = parameters.getSupportedFocusModes(); List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes != null if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
&& focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE); parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
} }
List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes(); List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
...@@ -81,9 +78,8 @@ public class LegacyCameraConnectionFragment extends Fragment { ...@@ -81,9 +78,8 @@ public class LegacyCameraConnectionFragment extends Fragment {
for (Camera.Size size : cameraSizes) { for (Camera.Size size : cameraSizes) {
sizes[i++] = new Size(size.width, size.height); sizes[i++] = new Size(size.width, size.height);
} }
Size previewSize = Size previewSize = CameraConnectionFragment.chooseOptimalSize(sizes, desiredSize.getWidth(),
CameraConnectionFragment.chooseOptimalSize( desiredSize.getHeight());
sizes, desiredSize.getWidth(), desiredSize.getHeight());
parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight()); parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
camera.setDisplayOrientation(90); camera.setDisplayOrientation(90);
camera.setParameters(parameters); camera.setParameters(parameters);
...@@ -102,8 +98,8 @@ public class LegacyCameraConnectionFragment extends Fragment { ...@@ -102,8 +98,8 @@ public class LegacyCameraConnectionFragment extends Fragment {
} }
@Override @Override
public void onSurfaceTextureSizeChanged( public void onSurfaceTextureSizeChanged(final SurfaceTexture texture, final int width, final int height) {
final SurfaceTexture texture, final int width, final int height) {} }
@Override @Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) { public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
...@@ -111,21 +107,22 @@ public class LegacyCameraConnectionFragment extends Fragment { ...@@ -111,21 +107,22 @@ public class LegacyCameraConnectionFragment extends Fragment {
} }
@Override @Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {} public void onSurfaceTextureUpdated(final SurfaceTexture texture) {
}
}; };
/** An additional thread for running tasks that shouldn't block the UI. */ /** An additional thread for running tasks that shouldn't block the UI. */
private HandlerThread backgroundThread; private HandlerThread backgroundThread;
public LegacyCameraConnectionFragment( public LegacyCameraConnectionFragment(final Camera.PreviewCallback imageListener, final int layout,
final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) { final Size desiredSize) {
this.imageListener = imageListener; this.imageListener = imageListener;
this.layout = layout; this.layout = layout;
this.desiredSize = desiredSize; this.desiredSize = desiredSize;
} }
@Override @Override
public View onCreateView( public View onCreateView(final LayoutInflater inflater, final ViewGroup container,
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) { final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false); return inflater.inflate(layout, container, false);
} }
...@@ -143,9 +140,12 @@ public class LegacyCameraConnectionFragment extends Fragment { ...@@ -143,9 +140,12 @@ public class LegacyCameraConnectionFragment extends Fragment {
public void onResume() { public void onResume() {
super.onResume(); super.onResume();
startBackgroundThread(); startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already // When the screen is turned off and turned back on, the SurfaceTexture is
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open // already
// a camera and start preview from here (otherwise, we wait until the surface is ready in // available, and "onSurfaceTextureAvailable" will not be called. In that case,
// we can open
// a camera and start preview from here (otherwise, we wait until the surface is
// ready in
// the SurfaceTextureListener). // the SurfaceTextureListener).
if (textureView.isAvailable()) { if (textureView.isAvailable()) {
...@@ -192,7 +192,8 @@ public class LegacyCameraConnectionFragment extends Fragment { ...@@ -192,7 +192,8 @@ public class LegacyCameraConnectionFragment extends Fragment {
CameraInfo ci = new CameraInfo(); CameraInfo ci = new CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) { for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, ci); Camera.getCameraInfo(i, ci);
if (ci.facing == CameraInfo.CAMERA_FACING_BACK) return i; if (ci.facing == CameraInfo.CAMERA_FACING_BACK)
return i;
} }
return -1; // No camera found return -1; // No camera found
} }
......
...@@ -47,8 +47,8 @@ import com.agenew.detection.tflite.YoloV5Classifier; ...@@ -47,8 +47,8 @@ import com.agenew.detection.tflite.YoloV5Classifier;
import com.agenew.detection.tracking.MultiBoxTracker; import com.agenew.detection.tracking.MultiBoxTracker;
/** /**
* An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to detect and then track * An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to
* objects. * detect and then track objects.
*/ */
public class MainActivity extends CameraActivity implements OnImageAvailableListener { public class MainActivity extends CameraActivity implements OnImageAvailableListener {
private static final Logger LOGGER = new Logger(); private static final Logger LOGGER = new Logger();
...@@ -82,9 +82,8 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList ...@@ -82,9 +82,8 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
@Override @Override
public void onPreviewSizeChosen(final Size size, final int rotation) { public void onPreviewSizeChosen(final Size size, final int rotation) {
final float textSizePx = final float textSizePx = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP,
TypedValue.applyDimension( getResources().getDisplayMetrics());
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx); borderedText = new BorderedText(textSizePx);
borderedText.setTypeface(Typeface.MONOSPACE); borderedText.setTypeface(Typeface.MONOSPACE);
...@@ -100,9 +99,8 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList ...@@ -100,9 +99,8 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
} catch (final IOException e) { } catch (final IOException e) {
e.printStackTrace(); e.printStackTrace();
LOGGER.e(e, "Exception initializing classifier!"); LOGGER.e(e, "Exception initializing classifier!");
Toast toast = Toast toast = Toast.makeText(getApplicationContext(), "Classifier could not be initialized",
Toast.makeText( Toast.LENGTH_SHORT);
getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
toast.show(); toast.show();
finish(); finish();
} }
...@@ -125,18 +123,14 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList ...@@ -125,18 +123,14 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888); rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888); croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
frameToCropTransform = frameToCropTransform = ImageUtils.getTransformationMatrix(previewWidth, previewHeight, cropSize, cropSize,
ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
cropSize, cropSize,
sensorOrientation, MAINTAIN_ASPECT); sensorOrientation, MAINTAIN_ASPECT);
cropToFrameTransform = new Matrix(); cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform); frameToCropTransform.invert(cropToFrameTransform);
trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay); trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay);
trackingOverlay.addCallback( trackingOverlay.addCallback(new DrawCallback() {
new DrawCallback() {
@Override @Override
public void drawCallback(final Canvas canvas) { public void drawCallback(final Canvas canvas) {
tracker.draw(canvas); tracker.draw(canvas);
...@@ -157,8 +151,7 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList ...@@ -157,8 +151,7 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
final int numThreads = Integer.parseInt(threads); final int numThreads = Integer.parseInt(threads);
handler.post(() -> { handler.post(() -> {
if (modelIndex == currentModel && deviceIndex == currentDevice if (modelIndex == currentModel && deviceIndex == currentDevice && numThreads == currentNumThreads) {
&& numThreads == currentNumThreads) {
return; return;
} }
currentModel = modelIndex; currentModel = modelIndex;
...@@ -185,18 +178,15 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList ...@@ -185,18 +178,15 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
if (detector == null) { if (detector == null) {
return; return;
} }
} } catch (IOException e) {
catch(IOException e) {
e.printStackTrace(); e.printStackTrace();
LOGGER.e(e, "Exception in updateActiveModel()"); LOGGER.e(e, "Exception in updateActiveModel()");
Toast toast = Toast toast = Toast.makeText(getApplicationContext(), "Classifier could not be initialized",
Toast.makeText( Toast.LENGTH_SHORT);
getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
toast.show(); toast.show();
finish(); finish();
} }
if (device.equals("CPU")) { if (device.equals("CPU")) {
detector.useCPU(); detector.useCPU();
} else if (device.equals("GPU")) { } else if (device.equals("GPU")) {
...@@ -209,11 +199,8 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList ...@@ -209,11 +199,8 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
int cropSize = detector.getInputSize(); int cropSize = detector.getInputSize();
croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888); croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
frameToCropTransform = frameToCropTransform = ImageUtils.getTransformationMatrix(previewWidth, previewHeight, cropSize,
ImageUtils.getTransformationMatrix( cropSize, sensorOrientation, MAINTAIN_ASPECT);
previewWidth, previewHeight,
cropSize, cropSize,
sensorOrientation, MAINTAIN_ASPECT);
cropToFrameTransform = new Matrix(); cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform); frameToCropTransform.invert(cropToFrameTransform);
...@@ -245,8 +232,7 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList ...@@ -245,8 +232,7 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
ImageUtils.saveBitmap(croppedBitmap); ImageUtils.saveBitmap(croppedBitmap);
} }
runInBackground( runInBackground(new Runnable() {
new Runnable() {
@Override @Override
public void run() { public void run() {
LOGGER.i("Running detection on image " + currTimestamp); LOGGER.i("Running detection on image " + currTimestamp);
...@@ -270,8 +256,7 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList ...@@ -270,8 +256,7 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
break; break;
} }
final List<Classifier.Recognition> mappedRecognitions = final List<Classifier.Recognition> mappedRecognitions = new LinkedList<>();
new LinkedList<Classifier.Recognition>();
for (final Classifier.Recognition result : results) { for (final Classifier.Recognition result : results) {
final RectF location = result.getLocation(); final RectF location = result.getLocation();
...@@ -290,14 +275,10 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList ...@@ -290,14 +275,10 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
computingDetection = false; computingDetection = false;
runOnUiThread( runOnUiThread(() -> {
new Runnable() {
@Override
public void run() {
showFrameInfo(previewWidth + "x" + previewHeight); showFrameInfo(previewWidth + "x" + previewHeight);
showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight()); showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight());
showInference(lastProcessingTimeMs + "ms"); showInference(lastProcessingTimeMs + "ms");
}
}); });
} }
}); });
...@@ -313,18 +294,14 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList ...@@ -313,18 +294,14 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
return DESIRED_PREVIEW_SIZE; return DESIRED_PREVIEW_SIZE;
} }
// Which detection model to use: by default uses Tensorflow Object Detection API frozen // Which detection model to use: by default uses Tensorflow Object Detection API
// frozen
// checkpoints. // checkpoints.
private enum DetectorMode { private enum DetectorMode {
TF_OD_API; TF_OD_API;
} }
@Override @Override
protected void setUseNNAPI(final boolean isChecked) {
runInBackground(() -> detector.setUseNNAPI(isChecked));
}
@Override
protected void setNumThreads(final int numThreads) { protected void setNumThreads(final int numThreads) {
runInBackground(() -> detector.setNumThreads(numThreads)); runInBackground(() -> detector.setNumThreads(numThreads));
} }
......
...@@ -38,9 +38,10 @@ public class AutoFitTextureView extends TextureView { ...@@ -38,9 +38,10 @@ public class AutoFitTextureView extends TextureView {
} }
/** /**
* Sets the aspect ratio for this view. The size of the view will be measured based on the ratio * Sets the aspect ratio for this view. The size of the view will be measured
* calculated from the parameters. Note that the actual sizes of parameters don't matter, that is, * based on the ratio calculated from the parameters. Note that the actual sizes
* calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result. * of parameters don't matter, that is, calling setAspectRatio(2, 3) and
* setAspectRatio(4, 6) make the same result.
* *
* @param width Relative horizontal size * @param width Relative horizontal size
* @param height Relative vertical size * @param height Relative vertical size
......
...@@ -18,22 +18,20 @@ package com.agenew.detection.env; ...@@ -18,22 +18,20 @@ package com.agenew.detection.env;
import android.graphics.Canvas; import android.graphics.Canvas;
import android.graphics.Color; import android.graphics.Color;
import android.graphics.Paint; import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Paint.Style; import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.graphics.Typeface; import android.graphics.Typeface;
import java.util.Vector;
/** A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas. */ /**
* A class that encapsulates the tedious bits of rendering legible, bordered
* text onto a canvas.
*/
public class BorderedText { public class BorderedText {
private final Paint interiorPaint; private final Paint interiorPaint;
private final Paint exteriorPaint; private final Paint exteriorPaint;
private final float textSize;
/** /**
* Creates a left-aligned bordered text object with a white interior, and a black exterior with * Creates a left-aligned bordered text object with a white interior, and a
* the specified text size. * black exterior with the specified text size.
* *
* @param textSize text size in pixels * @param textSize text size in pixels
*/ */
...@@ -42,8 +40,8 @@ public class BorderedText { ...@@ -42,8 +40,8 @@ public class BorderedText {
} }
/** /**
* Create a bordered text object with the specified interior and exterior colors, text size and * Create a bordered text object with the specified interior and exterior
* alignment. * colors, text size and alignment.
* *
* @param interiorColor the interior text color * @param interiorColor the interior text color
* @param exteriorColor the exterior text color * @param exteriorColor the exterior text color
...@@ -64,8 +62,6 @@ public class BorderedText { ...@@ -64,8 +62,6 @@ public class BorderedText {
exteriorPaint.setStrokeWidth(textSize / 8); exteriorPaint.setStrokeWidth(textSize / 8);
exteriorPaint.setAntiAlias(false); exteriorPaint.setAntiAlias(false);
exteriorPaint.setAlpha(255); exteriorPaint.setAlpha(255);
this.textSize = textSize;
} }
public void setTypeface(Typeface typeface) { public void setTypeface(Typeface typeface) {
...@@ -78,8 +74,7 @@ public class BorderedText { ...@@ -78,8 +74,7 @@ public class BorderedText {
canvas.drawText(text, posX, posY, interiorPaint); canvas.drawText(text, posX, posY, interiorPaint);
} }
public void drawText( public void drawText(final Canvas canvas, final float posX, final float posY, final String text, Paint bgPaint) {
final Canvas canvas, final float posX, final float posY, final String text, Paint bgPaint) {
float width = exteriorPaint.measureText(text); float width = exteriorPaint.measureText(text);
float textSize = exteriorPaint.getTextSize(); float textSize = exteriorPaint.getTextSize();
...@@ -90,39 +85,4 @@ public class BorderedText { ...@@ -90,39 +85,4 @@ public class BorderedText {
canvas.drawText(text, posX, (posY + textSize), interiorPaint); canvas.drawText(text, posX, (posY + textSize), interiorPaint);
} }
public void drawLines(Canvas canvas, final float posX, final float posY, Vector<String> lines) {
int lineNum = 0;
for (final String line : lines) {
drawText(canvas, posX, posY - getTextSize() * (lines.size() - lineNum - 1), line);
++lineNum;
}
}
public void setInteriorColor(final int color) {
interiorPaint.setColor(color);
}
public void setExteriorColor(final int color) {
exteriorPaint.setColor(color);
}
public float getTextSize() {
return textSize;
}
public void setAlpha(final int alpha) {
interiorPaint.setAlpha(alpha);
exteriorPaint.setAlpha(alpha);
}
public void getTextBounds(
final String line, final int index, final int count, final Rect lineBounds) {
interiorPaint.getTextBounds(line, index, count, lineBounds);
}
public void setTextAlign(final Align align) {
interiorPaint.setTextAlign(align);
exteriorPaint.setTextAlign(align);
}
} }
...@@ -23,22 +23,22 @@ import java.io.FileOutputStream; ...@@ -23,22 +23,22 @@ import java.io.FileOutputStream;
/** Utility class for manipulating images. */ /** Utility class for manipulating images. */
public class ImageUtils { public class ImageUtils {
// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges // This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their
// ranges
// are normalized to eight bits. // are normalized to eight bits.
static final int kMaxChannelValue = 262143; static final int kMaxChannelValue = 262143;
@SuppressWarnings("unused")
private static final Logger LOGGER = new Logger(); private static final Logger LOGGER = new Logger();
/** /**
* Utility method to compute the allocated size in bytes of a YUV420SP image of the given * Utility method to compute the allocated size in bytes of a YUV420SP image of
* dimensions. * the given dimensions.
*/ */
public static int getYUVByteSize(final int width, final int height) { public static int getYUVByteSize(final int width, final int height) {
// The luminance plane requires 1 byte per pixel. // The luminance plane requires 1 byte per pixel.
final int ySize = width * height; final int ySize = width * height;
// The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up. // The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded
// up.
// Each 2x2 block takes 2 bytes to encode, one each for U and V. // Each 2x2 block takes 2 bytes to encode, one each for U and V.
final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2; final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;
...@@ -61,8 +61,7 @@ public class ImageUtils { ...@@ -61,8 +61,7 @@ public class ImageUtils {
* @param filename The location to save the bitmap to. * @param filename The location to save the bitmap to.
*/ */
public static void saveBitmap(final Bitmap bitmap, final String filename) { public static void saveBitmap(final Bitmap bitmap, final String filename) {
final String root = final String root = Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root); LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root);
final File myDir = new File(root); final File myDir = new File(root);
...@@ -128,16 +127,8 @@ public class ImageUtils { ...@@ -128,16 +127,8 @@ public class ImageUtils {
return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff); return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
} }
public static void convertYUV420ToARGB8888( public static void convertYUV420ToARGB8888(byte[] yData, byte[] uData, byte[] vData, int width, int height,
byte[] yData, int yRowStride, int uvRowStride, int uvPixelStride, int[] out) {
byte[] uData,
byte[] vData,
int width,
int height,
int yRowStride,
int uvRowStride,
int uvPixelStride,
int[] out) {
int yp = 0; int yp = 0;
for (int j = 0; j < height; j++) { for (int j = 0; j < height; j++) {
int pY = yRowStride * j; int pY = yRowStride * j;
...@@ -152,26 +143,21 @@ public class ImageUtils { ...@@ -152,26 +143,21 @@ public class ImageUtils {
} }
/** /**
* Returns a transformation matrix from one reference frame into another. Handles cropping (if * Returns a transformation matrix from one reference frame into another.
* maintaining aspect ratio is desired) and rotation. * Handles cropping (if maintaining aspect ratio is desired) and rotation.
* *
* @param srcWidth Width of source frame. * @param srcWidth Width of source frame.
* @param srcHeight Height of source frame. * @param srcHeight Height of source frame.
* @param dstWidth Width of destination frame. * @param dstWidth Width of destination frame.
* @param dstHeight Height of destination frame. * @param dstHeight Height of destination frame.
* @param applyRotation Amount of rotation to apply from one frame to another. Must be a multiple * @param applyRotation Amount of rotation to apply from one frame to
* of 90. * another. Must be a multiple of 90.
* @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant, * @param maintainAspectRatio If true, will ensure that scaling in x and y
* cropping the image if necessary. * remains constant, cropping the image if necessary.
* @return The transformation fulfilling the desired requirements. * @return The transformation fulfilling the desired requirements.
*/ */
public static Matrix getTransformationMatrix( public static Matrix getTransformationMatrix(final int srcWidth, final int srcHeight, final int dstWidth,
final int srcWidth, final int dstHeight, final int applyRotation, final boolean maintainAspectRatio) {
final int srcHeight,
final int dstWidth,
final int dstHeight,
final int applyRotation,
final boolean maintainAspectRatio) {
final Matrix matrix = new Matrix(); final Matrix matrix = new Matrix();
if (applyRotation != 0) { if (applyRotation != 0) {
......
...@@ -19,7 +19,10 @@ import android.util.Log; ...@@ -19,7 +19,10 @@ import android.util.Log;
import java.util.HashSet; import java.util.HashSet;
import java.util.Set; import java.util.Set;
/** Wrapper for the platform log function, allows convenient message prefixing and log disabling. */ /**
* Wrapper for the platform log function, allows convenient message prefixing
* and log disabling.
*/
public final class Logger { public final class Logger {
private static final String DEFAULT_TAG = "tensorflow"; private static final String DEFAULT_TAG = "tensorflow";
private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG; private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG;
...@@ -39,33 +42,18 @@ public final class Logger { ...@@ -39,33 +42,18 @@ public final class Logger {
private int minLogLevel = DEFAULT_MIN_LOG_LEVEL; private int minLogLevel = DEFAULT_MIN_LOG_LEVEL;
/** /**
* Creates a Logger using the class name as the message prefix. * Creates a Logger with a custom tag and a custom message prefix. If the
* message prefix is set to
* *
* @param clazz the simple name of this class is used as the message prefix. * <pre>
*/ * null
public Logger(final Class<?> clazz) { * </pre>
this(clazz.getSimpleName());
}
/**
* Creates a Logger using the specified message prefix.
*
* @param messagePrefix is prepended to the text of every message.
*/
public Logger(final String messagePrefix) {
this(DEFAULT_TAG, messagePrefix);
}
/**
* Creates a Logger with a custom tag and a custom message prefix. If the message prefix is set to
*
* <pre>null</pre>
* *
* , the caller's class name is used as the prefix. * , the caller's class name is used as the prefix.
* *
* @param tag identifies the source of a log message. * @param tag identifies the source of a log message.
* @param messagePrefix prepended to every message if non-null. If null, the name of the caller is * @param messagePrefix prepended to every message if non-null. If null, the
* being used * name of the caller is being used
*/ */
public Logger(final String tag, final String messagePrefix) { public Logger(final String tag, final String messagePrefix) {
this.tag = tag; this.tag = tag;
...@@ -78,21 +66,17 @@ public final class Logger { ...@@ -78,21 +66,17 @@ public final class Logger {
this(DEFAULT_TAG, null); this(DEFAULT_TAG, null);
} }
/** Creates a Logger using the caller's class name as the message prefix. */
public Logger(final int minLogLevel) {
this(DEFAULT_TAG, null);
this.minLogLevel = minLogLevel;
}
/** /**
* Return caller's simple name. * Return caller's simple name.
* *
* <p>Android getStackTrace() returns an array that looks like this: stackTrace[0]: * <p>
* Android getStackTrace() returns an array that looks like this: stackTrace[0]:
* dalvik.system.VMStack stackTrace[1]: java.lang.Thread stackTrace[2]: * dalvik.system.VMStack stackTrace[1]: java.lang.Thread stackTrace[2]:
* com.google.android.apps.unveil.env.UnveilLogger stackTrace[3]: * com.google.android.apps.unveil.env.UnveilLogger stackTrace[3]:
* com.google.android.apps.unveil.BaseApplication * com.google.android.apps.unveil.BaseApplication
* *
* <p>This function returns the simple version of the first non-filtered name. * <p>
* This function returns the simple version of the first non-filtered name.
* *
* @return caller's simple name * @return caller's simple name
*/ */
...@@ -103,7 +87,8 @@ public final class Logger { ...@@ -103,7 +87,8 @@ public final class Logger {
for (final StackTraceElement elem : stackTrace) { for (final StackTraceElement elem : stackTrace) {
final String className = elem.getClassName(); final String className = elem.getClassName();
if (!IGNORED_CLASS_NAMES.contains(className)) { if (!IGNORED_CLASS_NAMES.contains(className)) {
// We're only interested in the simple name of the class, not the complete package. // We're only interested in the simple name of the class, not the complete
// package.
final String[] classParts = className.split("\\."); final String[] classParts = className.split("\\.");
return classParts[classParts.length - 1]; return classParts[classParts.length - 1];
} }
...@@ -112,10 +97,6 @@ public final class Logger { ...@@ -112,10 +97,6 @@ public final class Logger {
return Logger.class.getSimpleName(); return Logger.class.getSimpleName();
} }
public void setMinLogLevel(final int minLogLevel) {
this.minLogLevel = minLogLevel;
}
public boolean isLoggable(final int logLevel) { public boolean isLoggable(final int logLevel) {
return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel); return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel);
} }
...@@ -130,24 +111,12 @@ public final class Logger { ...@@ -130,24 +111,12 @@ public final class Logger {
} }
} }
public void v(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.VERBOSE)) {
Log.v(tag, toMessage(format, args), t);
}
}
public void d(final String format, final Object... args) { public void d(final String format, final Object... args) {
if (isLoggable(Log.DEBUG)) { if (isLoggable(Log.DEBUG)) {
Log.d(tag, toMessage(format, args)); Log.d(tag, toMessage(format, args));
} }
} }
public void d(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.DEBUG)) {
Log.d(tag, toMessage(format, args), t);
}
}
public void i(final String format, final Object... args) { public void i(final String format, final Object... args) {
if (isLoggable(Log.INFO)) { if (isLoggable(Log.INFO)) {
Log.i(tag, toMessage(format, args)); Log.i(tag, toMessage(format, args));
...@@ -166,12 +135,6 @@ public final class Logger { ...@@ -166,12 +135,6 @@ public final class Logger {
} }
} }
public void w(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.WARN)) {
Log.w(tag, toMessage(format, args), t);
}
}
public void e(final String format, final Object... args) { public void e(final String format, final Object... args) {
if (isLoggable(Log.ERROR)) { if (isLoggable(Log.ERROR)) {
Log.e(tag, toMessage(format, args)); Log.e(tag, toMessage(format, args));
......
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package com.agenew.detection.env;
import android.graphics.Bitmap;
import android.text.TextUtils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/** Size class independent of a Camera object. */
public class Size implements Comparable<Size>, Serializable {
    // 1.4 went out with this UID so we'll need to maintain it to preserve pending
    // queries when upgrading.
    public static final long serialVersionUID = 7689808733290872361L;

    // Dimensions in pixels; immutable once constructed.
    public final int width;
    public final int height;

    public Size(final int width, final int height) {
        this.width = width;
        this.height = height;
    }

    public Size(final Bitmap bmp) {
        this.width = bmp.getWidth();
        this.height = bmp.getHeight();
    }

    /**
     * Rotate a size by the given number of degrees.
     *
     * @param size Size to rotate.
     * @param rotation Degrees {0, 90, 180, 270} to rotate the size.
     * @return Rotated size.
     */
    public static Size getRotatedSize(final Size size, final int rotation) {
        if (rotation % 180 != 0) {
            // The phone is portrait, therefore the camera is sideways and frame
            // should be rotated.
            return new Size(size.height, size.width);
        }
        return size;
    }

    /**
     * Parses a "&lt;width&gt;x&lt;height&gt;" string into a Size.
     *
     * @param sizeString string to parse; surrounding whitespace is ignored.
     * @return the parsed Size, or null if the string is empty or malformed.
     */
    public static Size parseFromString(String sizeString) {
        if (TextUtils.isEmpty(sizeString)) {
            return null;
        }

        sizeString = sizeString.trim();

        // The expected format is "<width>x<height>".
        final String[] components = sizeString.split("x");
        if (components.length == 2) {
            try {
                final int width = Integer.parseInt(components[0]);
                final int height = Integer.parseInt(components[1]);
                return new Size(width, height);
            } catch (final NumberFormatException e) {
                return null;
            }
        }
        return null;
    }

    /**
     * Parses a comma-separated list of "&lt;width&gt;x&lt;height&gt;" entries.
     * Malformed entries are silently skipped; a null input yields an empty list.
     */
    public static List<Size> sizeStringToList(final String sizes) {
        final List<Size> sizeList = new ArrayList<Size>();
        if (sizes != null) {
            for (final String pair : sizes.split(",")) {
                final Size size = Size.parseFromString(pair);
                if (size != null) {
                    sizeList.add(size);
                }
            }
        }
        return sizeList;
    }

    /** Joins the sizes' string forms with commas; empty string for null or empty input. */
    public static String sizeListToString(final List<Size> sizes) {
        // StringBuilder avoids the O(n^2) cost of repeated string concatenation
        // that the previous implementation incurred in the loop.
        final StringBuilder sizesString = new StringBuilder();
        if (sizes != null && sizes.size() > 0) {
            sizesString.append(sizes.get(0).toString());
            for (int i = 1; i < sizes.size(); i++) {
                sizesString.append(",").append(sizes.get(i).toString());
            }
        }
        return sizesString.toString();
    }

    public static final String dimensionsAsString(final int width, final int height) {
        return width + "x" + height;
    }

    public final float aspectRatio() {
        return (float) width / (float) height;
    }

    @Override
    public int compareTo(final Size other) {
        // Compare by area; widened to long so large dimensions cannot overflow
        // the int product (the previous subtraction-based compare could).
        return Long.compare((long) width * height, (long) other.width * other.height);
    }

    @Override
    public boolean equals(final Object other) {
        // instanceof is false for null, so no separate null check is needed.
        if (!(other instanceof Size)) {
            return false;
        }

        final Size otherSize = (Size) other;
        return (width == otherSize.width && height == otherSize.height);
    }

    @Override
    public int hashCode() {
        return width * 32713 + height;
    }

    @Override
    public String toString() {
        return dimensionsAsString(width, height);
    }
}
package com.agenew.detection.env; package com.agenew.detection.env;
import android.content.Context;
import android.content.res.AssetFileDescriptor; import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager; import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.os.Environment;
import android.util.Log; import android.util.Log;
import java.io.File;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.nio.MappedByteBuffer; import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel; import java.nio.channels.FileChannel;
...@@ -63,149 +54,4 @@ public class Utils { ...@@ -63,149 +54,4 @@ public class Utils {
return result; return result;
} }
/**
 * In-place softmax: rescales {@code vals} so entries are positive and sum to 1.
 * Subtracts the maximum before exponentiating for numerical stability.
 */
public static void softmax(final float[] vals) {
    float max = Float.NEGATIVE_INFINITY;
    for (int i = 0; i < vals.length; ++i) {
        max = Math.max(max, vals[i]);
    }
    float sum = 0.0f;
    for (int i = 0; i < vals.length; ++i) {
        final float e = (float) Math.exp(vals[i] - max);
        vals[i] = e;
        sum += e;
    }
    for (int i = 0; i < vals.length; ++i) {
        vals[i] /= sum;
    }
}
/** Logistic sigmoid: maps any real input into (0, 1). */
public static float expit(final float x) {
    final double z = Math.exp(-x);
    return (float) (1. / (1. + z));
}
// public static Bitmap scale(Context context, String filePath) {
// AssetManager assetManager = context.getAssets();
//
// InputStream istr;
// Bitmap bitmap = null;
// try {
// istr = assetManager.open(filePath);
// bitmap = BitmapFactory.decodeStream(istr);
// bitmap = Bitmap.createScaledBitmap(bitmap, MainActivity.TF_OD_API_INPUT_SIZE, MainActivity.TF_OD_API_INPUT_SIZE, false);
// } catch (IOException e) {
// // handle exception
// Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
// }
//
// return bitmap;
// }
/**
 * Decodes an asset file into a Bitmap.
 *
 * @param context context whose AssetManager is used to open the asset.
 * @param filePath asset-relative path of the image file.
 * @return the decoded bitmap, or null if the asset could not be opened/decoded.
 */
public static Bitmap getBitmapFromAsset(Context context, String filePath) {
    AssetManager assetManager = context.getAssets();

    Bitmap bitmap = null;
    // try-with-resources guarantees the asset stream is closed; the previous
    // version leaked the InputStream on every call.
    try (InputStream istr = assetManager.open(filePath)) {
        bitmap = BitmapFactory.decodeStream(istr);
    } catch (IOException e) {
        // Pass the throwable to Log.e so the full stack trace is preserved.
        Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage(), e);
    }

    return bitmap;
}
/**
 * Returns a transformation matrix from one reference frame into another.
 * Handles cropping (if maintaining aspect ratio is desired) and rotation.
 *
 * @param srcWidth Width of source frame.
 * @param srcHeight Height of source frame.
 * @param dstWidth Width of destination frame.
 * @param dstHeight Height of destination frame.
 * @param applyRotation Amount of rotation to apply from one frame to
 * another. Must be a multiple of 90.
 * @param maintainAspectRatio If true, will ensure that scaling in x and y
 * remains constant, cropping the image if necessary.
 * @return The transformation fulfilling the desired requirements.
 */
public static Matrix getTransformationMatrix(final int srcWidth, final int srcHeight, final int dstWidth,
final int dstHeight, final int applyRotation, final boolean maintainAspectRatio) {
final Matrix matrix = new Matrix();
// Ops are applied as post-transforms, so they take effect in the order written:
// center-at-origin translate, rotate, scale, then translate to dst center.
if (applyRotation != 0) {
// Translate so center of image is at origin.
matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
// Rotate around origin.
matrix.postRotate(applyRotation);
}
// Account for the already applied rotation, if any, and then determine how
// much scaling is needed for each axis. A 90/270-degree rotation swaps the
// effective width and height.
final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
final int inWidth = transpose ? srcHeight : srcWidth;
final int inHeight = transpose ? srcWidth : srcHeight;
// Apply scaling if necessary.
if (inWidth != dstWidth || inHeight != dstHeight) {
final float scaleFactorX = dstWidth / (float) inWidth;
final float scaleFactorY = dstHeight / (float) inHeight;
if (maintainAspectRatio) {
// Scale by the larger factor so that dst is filled completely while
// maintaining the aspect ratio. Some image may fall off the edge.
final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
matrix.postScale(scaleFactor, scaleFactor);
} else {
// Scale exactly to fill dst from src (aspect ratio may change).
matrix.postScale(scaleFactorX, scaleFactorY);
}
}
if (applyRotation != 0) {
// Translate back from origin centered reference to destination frame.
matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
}
return matrix;
}
/**
 * Scales {@code source} (without preserving aspect ratio) into a new
 * size x size ARGB_8888 bitmap.
 *
 * @param source bitmap to scale; not modified.
 * @param size edge length in pixels of the square output bitmap.
 * @return the scaled bitmap.
 */
public static Bitmap processBitmap(Bitmap source, int size) {
    final int imageHeight = source.getHeight();
    final int imageWidth = source.getWidth();

    final Bitmap croppedBitmap = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888);
    final Matrix frameToCropTransform = getTransformationMatrix(imageWidth, imageHeight, size, size, 0, false);
    // The previous version also computed the inverse of this matrix on every
    // call and never used it; that dead work has been removed.

    final Canvas canvas = new Canvas(croppedBitmap);
    canvas.drawBitmap(source, frameToCropTransform, null);
    return croppedBitmap;
}
/**
 * Writes {@code data} to "myFile.txt" in external storage, overwriting any
 * previous contents. I/O failures are logged, not thrown.
 *
 * @param data text to write.
 * @param context unused; kept for backward compatibility with existing callers.
 */
public static void writeToFile(String data, Context context) {
    final String baseDir = Environment.getExternalStorageDirectory().getAbsolutePath();
    final String fileName = "myFile.txt";
    final File file = new File(baseDir + File.separator + fileName);

    // try-with-resources replaces the manual try/finally and guarantees the
    // stream is closed even if write() throws.
    try (FileOutputStream stream = new FileOutputStream(file)) {
        stream.write(data.getBytes());
    } catch (IOException e) {
        Log.e("Exception", "File write failed: " + e.toString());
    }
}
} }
...@@ -34,8 +34,6 @@ public interface Classifier { ...@@ -34,8 +34,6 @@ public interface Classifier {
void setNumThreads(int num_threads); void setNumThreads(int num_threads);
void setUseNNAPI(boolean isChecked);
abstract float getObjThresh(); abstract float getObjThresh();
/** /**
......
...@@ -5,30 +5,19 @@ import android.content.res.AssetManager; ...@@ -5,30 +5,19 @@ import android.content.res.AssetManager;
import java.io.IOException; import java.io.IOException;
public class DetectorFactory { public class DetectorFactory {
public static YoloV5Classifier getDetector( public static YoloV5Classifier getDetector(final AssetManager assetManager, final String modelFilename)
final AssetManager assetManager,
final String modelFilename)
throws IOException { throws IOException {
String labelFilename = null; String labelFilename = null;
boolean isQuantized = false; boolean isQuantized = false;
int inputSize = 0; int inputSize = 0;
int[] output_width = new int[]{0};
int[][] masks = new int[][]{{0}};
int[] anchors = new int[]{0};
if (modelFilename.endsWith(".tflite")) { if (modelFilename.endsWith(".tflite")) {
labelFilename = "file:///android_asset/class.txt"; labelFilename = "file:///android_asset/class.txt";
isQuantized = modelFilename.endsWith("-int8.tflite"); isQuantized = modelFilename.endsWith("-int8.tflite");
inputSize = 640; inputSize = 640;
output_width = new int[]{80, 40, 20};
masks = new int[][]{{0, 1, 2}, {3, 4, 5}, {6, 7, 8}};
anchors = new int[]{
10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
};
} }
return YoloV5Classifier.create(assetManager, modelFilename, labelFilename, isQuantized, return YoloV5Classifier.create(assetManager, modelFilename, labelFilename, isQuantized, inputSize);
inputSize);
} }
} }
...@@ -18,7 +18,6 @@ package com.agenew.detection.tflite; ...@@ -18,7 +18,6 @@ package com.agenew.detection.tflite;
import android.content.res.AssetManager; import android.content.res.AssetManager;
import android.graphics.Bitmap; import android.graphics.Bitmap;
import android.graphics.RectF; import android.graphics.RectF;
import android.os.Build;
import android.util.Log; import android.util.Log;
//import org.tensorflow.lite.Interpreter; //import org.tensorflow.lite.Interpreter;
...@@ -45,15 +44,17 @@ import java.util.Map; ...@@ -45,15 +44,17 @@ import java.util.Map;
import java.util.PriorityQueue; import java.util.PriorityQueue;
import java.util.Vector; import java.util.Vector;
/** /**
* Wrapper for frozen detection models trained using the Tensorflow Object Detection API: * Wrapper for frozen detection models trained using the Tensorflow Object
* - https://github.com/tensorflow/models/tree/master/research/object_detection * Detection API: -
* https://github.com/tensorflow/models/tree/master/research/object_detection
* where you can find the training code. * where you can find the training code.
* <p> * <p>
* To use pretrained models in the API or convert to TF Lite models, please see docs for details: * To use pretrained models in the API or convert to TF Lite models, please see
* - https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md * docs for details: -
* - https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/running_on_mobile_tensorflowlite.md#running-our-model-on-android * https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md
* -
* https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/running_on_mobile_tensorflowlite.md#running-our-model-on-android
*/ */
public class YoloV5Classifier implements Classifier { public class YoloV5Classifier implements Classifier {
private static final String TAG = "YoloV5Classifier"; private static final String TAG = "YoloV5Classifier";
...@@ -66,16 +67,8 @@ public class YoloV5Classifier implements Classifier { ...@@ -66,16 +67,8 @@ public class YoloV5Classifier implements Classifier {
* @param labelFilename The filepath of label file for classes. * @param labelFilename The filepath of label file for classes.
* @param isQuantized Boolean representing model is quantized or not * @param isQuantized Boolean representing model is quantized or not
*/ */
public static YoloV5Classifier create( public static YoloV5Classifier create(final AssetManager assetManager, final String modelFilename,
final AssetManager assetManager, final String labelFilename, final boolean isQuantized, final int inputSize) throws IOException {
final String modelFilename,
final String labelFilename,
final boolean isQuantized,
final int inputSize
/*final int[] output_width,
final int[][] masks,
final int[] anchors*/)
throws IOException {
final YoloV5Classifier d = new YoloV5Classifier(); final YoloV5Classifier d = new YoloV5Classifier();
String actualFilename = labelFilename.split("file:///android_asset/")[1]; String actualFilename = labelFilename.split("file:///android_asset/")[1];
...@@ -91,27 +84,6 @@ public class YoloV5Classifier implements Classifier { ...@@ -91,27 +84,6 @@ public class YoloV5Classifier implements Classifier {
try { try {
Interpreter.Options options = (new Interpreter.Options()); Interpreter.Options options = (new Interpreter.Options());
options.setNumThreads(NUM_THREADS); options.setNumThreads(NUM_THREADS);
if (isNNAPI) {
d.nnapiDelegate = null;
// Initialize interpreter with NNAPI delegate for Android Pie or above
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
d.nnapiDelegate = new NnApiDelegate();
options.addDelegate(d.nnapiDelegate);
options.setNumThreads(NUM_THREADS);
// options.setUseNNAPI(false);
// options.setAllowFp16PrecisionForFp32(true);
// options.setAllowBufferHandleOutput(true);
options.setUseNNAPI(true);
}
}
/*
if (isGPU) {
GpuDelegate.Options gpu_options = new GpuDelegate.Options();
gpu_options.setPrecisionLossAllowed(true); // It seems that the default is true
gpu_options.setInferencePreference(GpuDelegate.Options.INFERENCE_PREFERENCE_SUSTAINED_SPEED);
d.gpuDelegate = new GpuDelegate(gpu_options);
options.addDelegate(d.gpuDelegate);
}*/
d.tfliteModel = Utils.loadModelFile(assetManager, modelFilename); d.tfliteModel = Utils.loadModelFile(assetManager, modelFilename);
d.tfLite = new Interpreter(d.tfliteModel, options); d.tfLite = new Interpreter(d.tfliteModel, options);
} catch (Exception e) { } catch (Exception e) {
...@@ -131,11 +103,9 @@ public class YoloV5Classifier implements Classifier { ...@@ -131,11 +103,9 @@ public class YoloV5Classifier implements Classifier {
d.imgData.order(ByteOrder.nativeOrder()); d.imgData.order(ByteOrder.nativeOrder());
d.intValues = new int[d.INPUT_SIZE * d.INPUT_SIZE]; d.intValues = new int[d.INPUT_SIZE * d.INPUT_SIZE];
d.output_box = (int) ((Math.pow((inputSize / 32), 2) + Math.pow((inputSize / 16), 2) + Math.pow((inputSize / 8), 2)) * 3); d.output_box = (int) ((Math.pow((inputSize / 32), 2) + Math.pow((inputSize / 16), 2)
// d.OUTPUT_WIDTH = output_width; + Math.pow((inputSize / 8), 2)) * 3);
// d.MASKS = masks; if (d.isModelQuantized) {
// d.ANCHORS = anchors;
if (d.isModelQuantized){
Tensor inpten = d.tfLite.getInputTensor(0); Tensor inpten = d.tfLite.getInputTensor(0);
d.inp_scale = inpten.quantizationParams().getScale(); d.inp_scale = inpten.quantizationParams().getScale();
d.inp_zero_point = inpten.quantizationParams().getZeroPoint(); d.inp_zero_point = inpten.quantizationParams().getZeroPoint();
...@@ -156,6 +126,7 @@ public class YoloV5Classifier implements Classifier { ...@@ -156,6 +126,7 @@ public class YoloV5Classifier implements Classifier {
public int getInputSize() { public int getInputSize() {
return INPUT_SIZE; return INPUT_SIZE;
} }
@Override @Override
public void enableStatLogging(final boolean logStats) { public void enableStatLogging(final boolean logStats) {
} }
...@@ -170,10 +141,8 @@ public class YoloV5Classifier implements Classifier { ...@@ -170,10 +141,8 @@ public class YoloV5Classifier implements Classifier {
tfLite.close(); tfLite.close();
tfLite = null; tfLite = null;
/* /*
if (gpuDelegate != null) { * if (gpuDelegate != null) { gpuDelegate.close(); gpuDelegate = null; }
gpuDelegate.close(); */
gpuDelegate = null;
}*/
if (nnapiDelegate != null) { if (nnapiDelegate != null) {
nnapiDelegate.close(); nnapiDelegate.close();
nnapiDelegate = null; nnapiDelegate = null;
...@@ -182,12 +151,8 @@ public class YoloV5Classifier implements Classifier { ...@@ -182,12 +151,8 @@ public class YoloV5Classifier implements Classifier {
} }
public void setNumThreads(int num_threads) { public void setNumThreads(int num_threads) {
if (tfLite != null) tfLite.setNumThreads(num_threads); if (tfLite != null)
} tfLite.setNumThreads(num_threads);
@Override
public void setUseNNAPI(boolean isChecked) {
// if (tfLite != null) tfLite.setUseNNAPI(isChecked);
} }
private void recreateInterpreter() { private void recreateInterpreter() {
...@@ -199,11 +164,9 @@ public class YoloV5Classifier implements Classifier { ...@@ -199,11 +164,9 @@ public class YoloV5Classifier implements Classifier {
public void useGpu() { public void useGpu() {
/* /*
if (gpuDelegate == null) { * if (gpuDelegate == null) { gpuDelegate = new GpuDelegate();
gpuDelegate = new GpuDelegate(); * tfliteOptions.addDelegate(gpuDelegate); recreateInterpreter(); }
tfliteOptions.addDelegate(gpuDelegate); */
recreateInterpreter();
}*/
} }
public void useCPU() { public void useCPU() {
...@@ -228,22 +191,13 @@ public class YoloV5Classifier implements Classifier { ...@@ -228,22 +191,13 @@ public class YoloV5Classifier implements Classifier {
private final float IMAGE_STD = 255.0f; private final float IMAGE_STD = 255.0f;
//config yolo // config yolo
private int INPUT_SIZE = -1; private int INPUT_SIZE = -1;
// private int[] OUTPUT_WIDTH;
// private int[][] MASKS;
// private int[] ANCHORS;
private int output_box; private int output_box;
private static final float[] XYSCALE = new float[]{1.2f, 1.1f, 1.05f};
private static final int NUM_BOXES_PER_BLOCK = 3;
// Number of threads in the java app // Number of threads in the java app
private static final int NUM_THREADS = 1; private static final int NUM_THREADS = 1;
private static boolean isNNAPI = false;
private static boolean isGPU = false;
private boolean isModelQuantized; private boolean isModelQuantized;
...@@ -273,19 +227,17 @@ public class YoloV5Classifier implements Classifier { ...@@ -273,19 +227,17 @@ public class YoloV5Classifier implements Classifier {
private float oup_scale; private float oup_scale;
private int oup_zero_point; private int oup_zero_point;
private int numClass; private int numClass;
private YoloV5Classifier() { private YoloV5Classifier() {
} }
//non maximum suppression // non maximum suppression
protected ArrayList<Recognition> nms(ArrayList<Recognition> list) { protected ArrayList<Recognition> nms(ArrayList<Recognition> list) {
ArrayList<Recognition> nmsList = new ArrayList<Recognition>(); ArrayList<Recognition> nmsList = new ArrayList<Recognition>();
for (int k = 0; k < labels.size(); k++) { for (int k = 0; k < labels.size(); k++) {
//1.find max confidence per class // 1.find max confidence per class
PriorityQueue<Recognition> pq = PriorityQueue<Recognition> pq = new PriorityQueue<Recognition>(50, new Comparator<Recognition>() {
new PriorityQueue<Recognition>(
50,
new Comparator<Recognition>() {
@Override @Override
public int compare(final Recognition lhs, final Recognition rhs) { public int compare(final Recognition lhs, final Recognition rhs) {
// Intentionally reversed to put high confidence at the head of the queue. // Intentionally reversed to put high confidence at the head of the queue.
...@@ -299,9 +251,9 @@ public class YoloV5Classifier implements Classifier { ...@@ -299,9 +251,9 @@ public class YoloV5Classifier implements Classifier {
} }
} }
//2.do non maximum suppression // 2.do non maximum suppression
while (pq.size() > 0) { while (pq.size() > 0) {
//insert detection with max confidence // insert detection with max confidence
Recognition[] a = new Recognition[pq.size()]; Recognition[] a = new Recognition[pq.size()];
Recognition[] detections = pq.toArray(a); Recognition[] detections = pq.toArray(a);
Recognition max = detections[0]; Recognition max = detections[0];
...@@ -327,11 +279,10 @@ public class YoloV5Classifier implements Classifier { ...@@ -327,11 +279,10 @@ public class YoloV5Classifier implements Classifier {
} }
protected float box_intersection(RectF a, RectF b) { protected float box_intersection(RectF a, RectF b) {
float w = overlap((a.left + a.right) / 2, a.right - a.left, float w = overlap((a.left + a.right) / 2, a.right - a.left, (b.left + b.right) / 2, b.right - b.left);
(b.left + b.right) / 2, b.right - b.left); float h = overlap((a.top + a.bottom) / 2, a.bottom - a.top, (b.top + b.bottom) / 2, b.bottom - b.top);
float h = overlap((a.top + a.bottom) / 2, a.bottom - a.top, if (w < 0 || h < 0)
(b.top + b.bottom) / 2, b.bottom - b.top); return 0;
if (w < 0 || h < 0) return 0;
float area = w * h; float area = w * h;
return area; return area;
} }
...@@ -359,11 +310,7 @@ public class YoloV5Classifier implements Classifier { ...@@ -359,11 +310,7 @@ public class YoloV5Classifier implements Classifier {
* Writes Image data into a {@code ByteBuffer}. * Writes Image data into a {@code ByteBuffer}.
*/ */
protected ByteBuffer convertBitmapToByteBuffer(Bitmap bitmap) { protected ByteBuffer convertBitmapToByteBuffer(Bitmap bitmap) {
// ByteBuffer byteBuffer = ByteBuffer.allocateDirect(4 * BATCH_SIZE * INPUT_SIZE * INPUT_SIZE * PIXEL_SIZE);
// byteBuffer.order(ByteOrder.nativeOrder());
// int[] intValues = new int[INPUT_SIZE * INPUT_SIZE];
bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight()); bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
int pixel = 0;
imgData.rewind(); imgData.rewind();
for (int i = 0; i < INPUT_SIZE; ++i) { for (int i = 0; i < INPUT_SIZE; ++i) {
...@@ -371,8 +318,10 @@ public class YoloV5Classifier implements Classifier { ...@@ -371,8 +318,10 @@ public class YoloV5Classifier implements Classifier {
int pixelValue = intValues[i * INPUT_SIZE + j]; int pixelValue = intValues[i * INPUT_SIZE + j];
if (isModelQuantized) { if (isModelQuantized) {
// Quantized model // Quantized model
imgData.put((byte) ((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale + inp_zero_point)); imgData.put((byte) ((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale
imgData.put((byte) ((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale + inp_zero_point)); + inp_zero_point));
imgData.put((byte) ((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale
+ inp_zero_point));
imgData.put((byte) (((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale + inp_zero_point)); imgData.put((byte) (((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale + inp_zero_point));
} else { // Float model } else { // Float model
imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD); imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
...@@ -385,16 +334,14 @@ public class YoloV5Classifier implements Classifier { ...@@ -385,16 +334,14 @@ public class YoloV5Classifier implements Classifier {
} }
public ArrayList<Recognition> recognizeImage(Bitmap bitmap) { public ArrayList<Recognition> recognizeImage(Bitmap bitmap) {
ByteBuffer byteBuffer_ = convertBitmapToByteBuffer(bitmap); convertBitmapToByteBuffer(bitmap);
Map<Integer, Object> outputMap = new HashMap<>(); Map<Integer, Object> outputMap = new HashMap<Integer, Object>();
// float[][][] outbuf = new float[1][output_box][labels.size() + 5];
outData.rewind(); outData.rewind();
outputMap.put(0, outData); outputMap.put(0, outData);
Log.d("YoloV5Classifier", "mObjThresh: " + getObjThresh()); Log.d("YoloV5Classifier", "mObjThresh: " + getObjThresh());
Object[] inputArray = {imgData}; Object[] inputArray = { imgData };
tfLite.runForMultipleInputsOutputs(inputArray, outputMap); tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
ByteBuffer byteBuffer = (ByteBuffer) outputMap.get(0); ByteBuffer byteBuffer = (ByteBuffer) outputMap.get(0);
...@@ -406,10 +353,9 @@ public class YoloV5Classifier implements Classifier { ...@@ -406,10 +353,9 @@ public class YoloV5Classifier implements Classifier {
Log.d("YoloV5Classifier", "out[0] detect start"); Log.d("YoloV5Classifier", "out[0] detect start");
for (int i = 0; i < output_box; ++i) { for (int i = 0; i < output_box; ++i) {
for (int j = 0; j < numClass + 5; ++j) { for (int j = 0; j < numClass + 5; ++j) {
if (isModelQuantized){ if (isModelQuantized) {
out[0][i][j] = oup_scale * (((int) byteBuffer.get() & 0xFF) - oup_zero_point); out[0][i][j] = oup_scale * (((int) byteBuffer.get() & 0xFF) - oup_zero_point);
} } else {
else {
out[0][i][j] = byteBuffer.getFloat(); out[0][i][j] = byteBuffer.getFloat();
} }
} }
...@@ -418,7 +364,7 @@ public class YoloV5Classifier implements Classifier { ...@@ -418,7 +364,7 @@ public class YoloV5Classifier implements Classifier {
out[0][i][j] *= getInputSize(); out[0][i][j] *= getInputSize();
} }
} }
for (int i = 0; i < output_box; ++i){ for (int i = 0; i < output_box; ++i) {
final int offset = 0; final int offset = 0;
final float confidence = out[0][i][4]; final float confidence = out[0][i][4];
int detectedClass = -1; int detectedClass = -1;
...@@ -443,38 +389,33 @@ public class YoloV5Classifier implements Classifier { ...@@ -443,38 +389,33 @@ public class YoloV5Classifier implements Classifier {
final float w = out[0][i][2]; final float w = out[0][i][2];
final float h = out[0][i][3]; final float h = out[0][i][3];
Log.d("YoloV5Classifier", Log.d("YoloV5Classifier", Float.toString(xPos) + ',' + yPos + ',' + w + ',' + h);
Float.toString(xPos) + ',' + yPos + ',' + w + ',' + h);
final RectF rect = final RectF rect = new RectF(Math.max(0, xPos - w / 2), Math.max(0, yPos - h / 2),
new RectF( Math.min(bitmap.getWidth() - 1, xPos + w / 2), Math.min(bitmap.getHeight() - 1, yPos + h / 2));
Math.max(0, xPos - w / 2), detections.add(new Recognition("" + offset, labels.get(detectedClass), confidenceInClass, rect,
Math.max(0, yPos - h / 2), detectedClass));
Math.min(bitmap.getWidth() - 1, xPos + w / 2),
Math.min(bitmap.getHeight() - 1, yPos + h / 2));
detections.add(new Recognition("" + offset, labels.get(detectedClass),
confidenceInClass, rect, detectedClass));
} }
} }
Log.d(TAG, "detect end"); Log.d(TAG, "detect end");
final ArrayList<Recognition> recognitions = nms(detections); final ArrayList<Recognition> recognitions = nms(detections);
// final ArrayList<Recognition> recognitions = detections;
return recognitions; return recognitions;
} }
public boolean checkInvalidateBox(float x, float y, float width, float height, float oriW, float oriH, int intputSize) { public boolean checkInvalidateBox(float x, float y, float width, float height, float oriW, float oriH,
int intputSize) {
// (1) (x, y, w, h) --> (xmin, ymin, xmax, ymax) // (1) (x, y, w, h) --> (xmin, ymin, xmax, ymax)
float halfHeight = height / 2.0f; float halfHeight = height / 2.0f;
float halfWidth = width / 2.0f; float halfWidth = width / 2.0f;
float[] pred_coor = new float[]{x - halfWidth, y - halfHeight, x + halfWidth, y + halfHeight}; float[] pred_coor = new float[] { x - halfWidth, y - halfHeight, x + halfWidth, y + halfHeight };
// (2) (xmin, ymin, xmax, ymax) -> (xmin_org, ymin_org, xmax_org, ymax_org) // (2) (xmin, ymin, xmax, ymax) -> (xmin_org, ymin_org, xmax_org, ymax_org)
float resize_ratioW = 1.0f * intputSize / oriW; float resize_ratioW = 1.0f * intputSize / oriW;
float resize_ratioH = 1.0f * intputSize / oriH; float resize_ratioH = 1.0f * intputSize / oriH;
float resize_ratio = resize_ratioW > resize_ratioH ? resize_ratioH : resize_ratioW; //min float resize_ratio = resize_ratioW > resize_ratioH ? resize_ratioH : resize_ratioW; // min
float dw = (intputSize - resize_ratio * oriW) / 2; float dw = (intputSize - resize_ratio * oriW) / 2;
float dh = (intputSize - resize_ratio * oriH) / 2; float dh = (intputSize - resize_ratio * oriH) / 2;
......
...@@ -35,27 +35,17 @@ import com.agenew.detection.env.ImageUtils; ...@@ -35,27 +35,17 @@ import com.agenew.detection.env.ImageUtils;
import com.agenew.detection.env.Logger; import com.agenew.detection.env.Logger;
import com.agenew.detection.tflite.Classifier.Recognition; import com.agenew.detection.tflite.Classifier.Recognition;
/** A tracker that handles non-max suppression and matches existing objects to new detections. */ /**
* A tracker that handles non-max suppression and matches existing objects to
* new detections.
*/
public class MultiBoxTracker { public class MultiBoxTracker {
private static final float TEXT_SIZE_DIP = 18; private static final float TEXT_SIZE_DIP = 18;
private static final float MIN_SIZE = 16.0f; private static final float MIN_SIZE = 16.0f;
private static final int[] COLORS = { private static final int[] COLORS = { Color.BLUE, Color.RED, Color.GREEN, Color.YELLOW, Color.CYAN, Color.MAGENTA,
Color.BLUE, Color.WHITE, Color.parseColor("#55FF55"), Color.parseColor("#FFA500"), Color.parseColor("#FF8888"),
Color.RED, Color.parseColor("#AAAAFF"), Color.parseColor("#FFFFAA"), Color.parseColor("#55AAAA"),
Color.GREEN, Color.parseColor("#AA33AA"), Color.parseColor("#0D0068") };
Color.YELLOW,
Color.CYAN,
Color.MAGENTA,
Color.WHITE,
Color.parseColor("#55FF55"),
Color.parseColor("#FFA500"),
Color.parseColor("#FF8888"),
Color.parseColor("#AAAAFF"),
Color.parseColor("#FFFFAA"),
Color.parseColor("#55AAAA"),
Color.parseColor("#AA33AA"),
Color.parseColor("#0D0068")
};
final List<Pair<Float, RectF>> screenRects = new LinkedList<Pair<Float, RectF>>(); final List<Pair<Float, RectF>> screenRects = new LinkedList<Pair<Float, RectF>>();
private final Logger logger = new Logger(); private final Logger logger = new Logger();
private final Queue<Integer> availableColors = new LinkedList<Integer>(); private final Queue<Integer> availableColors = new LinkedList<Integer>();
...@@ -80,14 +70,12 @@ public class MultiBoxTracker { ...@@ -80,14 +70,12 @@ public class MultiBoxTracker {
boxPaint.setStrokeJoin(Join.ROUND); boxPaint.setStrokeJoin(Join.ROUND);
boxPaint.setStrokeMiter(100); boxPaint.setStrokeMiter(100);
textSizePx = textSizePx = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP,
TypedValue.applyDimension( context.getResources().getDisplayMetrics());
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, context.getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx); borderedText = new BorderedText(textSizePx);
} }
public synchronized void setFrameConfiguration( public synchronized void setFrameConfiguration(final int width, final int height, final int sensorOrientation) {
final int width, final int height, final int sensorOrientation) {
frameWidth = width; frameWidth = width;
frameHeight = height; frameHeight = height;
this.sensorOrientation = sensorOrientation; this.sensorOrientation = sensorOrientation;
...@@ -122,18 +110,11 @@ public class MultiBoxTracker { ...@@ -122,18 +110,11 @@ public class MultiBoxTracker {
public synchronized void draw(final Canvas canvas) { public synchronized void draw(final Canvas canvas) {
final boolean rotated = sensorOrientation % 180 == 90; final boolean rotated = sensorOrientation % 180 == 90;
final float multiplier = final float multiplier = Math.min(canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
Math.min(
canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth)); canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth));
frameToCanvasMatrix = frameToCanvasMatrix = ImageUtils.getTransformationMatrix(frameWidth, frameHeight,
ImageUtils.getTransformationMatrix(
frameWidth,
frameHeight,
(int) (multiplier * (rotated ? frameHeight : frameWidth)), (int) (multiplier * (rotated ? frameHeight : frameWidth)),
(int) (multiplier * (rotated ? frameWidth : frameHeight)), (int) (multiplier * (rotated ? frameWidth : frameHeight)), sensorOrientation, false);
sensorOrientation,
false);
for (final TrackedRecognition recognition : trackedObjects) { for (final TrackedRecognition recognition : trackedObjects) {
final RectF trackedPos = new RectF(recognition.location); final RectF trackedPos = new RectF(recognition.location);
...@@ -143,14 +124,10 @@ public class MultiBoxTracker { ...@@ -143,14 +124,10 @@ public class MultiBoxTracker {
float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f; float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f;
canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint); canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint);
final String labelString = final String labelString = !TextUtils.isEmpty(recognition.title)
!TextUtils.isEmpty(recognition.title)
? String.format("%s %.2f", recognition.title, (100 * recognition.detectionConfidence)) ? String.format("%s %.2f", recognition.title, (100 * recognition.detectionConfidence))
: String.format("%.2f", (100 * recognition.detectionConfidence)); : String.format("%.2f", (100 * recognition.detectionConfidence));
// borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top, borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint);
// labelString);
borderedText.drawText(
canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint);
} }
} }
...@@ -169,8 +146,7 @@ public class MultiBoxTracker { ...@@ -169,8 +146,7 @@ public class MultiBoxTracker {
final RectF detectionScreenRect = new RectF(); final RectF detectionScreenRect = new RectF();
rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect); rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect);
logger.v( logger.v("Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);
"Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);
screenRects.add(new Pair<Float, RectF>(result.getConfidence(), detectionScreenRect)); screenRects.add(new Pair<Float, RectF>(result.getConfidence(), detectionScreenRect));
...@@ -193,13 +169,8 @@ public class MultiBoxTracker { ...@@ -193,13 +169,8 @@ public class MultiBoxTracker {
trackedRecognition.detectionConfidence = potential.first; trackedRecognition.detectionConfidence = potential.first;
trackedRecognition.location = new RectF(potential.second.getLocation()); trackedRecognition.location = new RectF(potential.second.getLocation());
trackedRecognition.title = potential.second.getTitle(); trackedRecognition.title = potential.second.getTitle();
// trackedRecognition.color = COLORS[trackedObjects.size() % COLORS.length];
trackedRecognition.color = COLORS[potential.second.getDetectedClass() % COLORS.length]; trackedRecognition.color = COLORS[potential.second.getDetectedClass() % COLORS.length];
trackedObjects.add(trackedRecognition); trackedObjects.add(trackedRecognition);
// if (trackedObjects.size() >= COLORS.length) {
// break;
// }
} }
} }
......
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillType="evenOdd"
android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
android:strokeColor="#00000000"
android:strokeWidth="1">
<aapt:attr name="android:fillColor">
<gradient
android:endX="78.5885"
android:endY="90.9159"
android:startX="48.7653"
android:startY="61.0927"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0"/>
<item
android:color="#00000000"
android:offset="1.0"/>
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
android:strokeColor="#00000000"
android:strokeWidth="1"/>
</vector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillColor="#26A69A"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@android:color/white"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@android:color/white"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<resources>
<!-- Base application theme. -->
<style name="AppTheme.ObjectDetection" parent="Theme.AppCompat.Light.NoActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/tfe_color_primary</item>
<item name="colorPrimaryDark">@color/tfe_color_primary_dark</item>
<item name="colorAccent">@color/tfe_color_accent</item>
</style>
</resources>
Markdown is supported
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!