Commit 2756155a by wanglei

format

1 parent e08d779b
Showing 284 additions and 1114 deletions
......@@ -14,11 +14,9 @@
<uses-permission android:name="android.permission.INTERNET"/>
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:icon="@drawable/ic_launcher"
android:label="@string/tfe_od_app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme.ObjectDetection"
android:hardwareAccelerated="true"
android:installLocation="internalOnly">
......
......@@ -17,6 +17,7 @@
package com.agenew.detection;
import android.Manifest;
import android.app.Activity;
import android.app.Fragment;
import android.content.Context;
import android.content.pm.PackageManager;
......@@ -36,17 +37,12 @@ import android.os.Handler;
import android.os.HandlerThread;
import android.os.Trace;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
//import androidx.appcompat.widget.Toolbar;
import android.util.Size;
import android.view.Surface;
import android.view.View;
import android.view.ViewTreeObserver;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.CompoundButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
......@@ -61,11 +57,8 @@ import java.util.ArrayList;
import com.agenew.detection.env.ImageUtils;
import com.agenew.detection.env.Logger;
public abstract class CameraActivity extends AppCompatActivity
implements OnImageAvailableListener,
Camera.PreviewCallback,
// CompoundButton.OnCheckedChangeListener,
View.OnClickListener {
public abstract class CameraActivity extends Activity
implements OnImageAvailableListener, Camera.PreviewCallback, View.OnClickListener {
private static final Logger LOGGER = new Logger();
private static final int PERMISSIONS_REQUEST = 1;
......@@ -86,7 +79,7 @@ public abstract class CameraActivity extends AppCompatActivity
protected int defaultDeviceIndex = 2;
private Runnable postInferenceCallback;
private Runnable imageConverter;
protected ArrayList<String> modelStrings = new ArrayList<String>();
protected ArrayList<String> modelStrings = new ArrayList<>();
private LinearLayout bottomSheetLayout;
private LinearLayout gestureLayout;
......@@ -103,7 +96,7 @@ public abstract class CameraActivity extends AppCompatActivity
int currentModel = -1;
int currentNumThreads = -1;
ArrayList<String> deviceStrings = new ArrayList<String>();
ArrayList<String> deviceStrings = new ArrayList<>();
@Override
protected void onCreate(final Bundle savedInstanceState) {
......@@ -112,9 +105,6 @@ public abstract class CameraActivity extends AppCompatActivity
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tfe_od_activity_camera);
// Toolbar toolbar = findViewById(R.id.toolbar);
// setSupportActionBar(toolbar);
// getSupportActionBar().setDisplayShowTitleEnabled(false);
if (hasPermission()) {
setFragment();
......@@ -131,19 +121,12 @@ public abstract class CameraActivity extends AppCompatActivity
deviceStrings.add("GPU");
deviceStrings.add("NNAPI");
deviceView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
ArrayAdapter<String> deviceAdapter =
new ArrayAdapter<>(
CameraActivity.this , R.layout.deviceview_row, R.id.deviceview_row_text, deviceStrings);
ArrayAdapter<String> deviceAdapter = new ArrayAdapter<>(this, R.layout.deviceview_row,
R.id.deviceview_row_text, deviceStrings);
deviceView.setAdapter(deviceAdapter);
deviceView.setItemChecked(defaultDeviceIndex, true);
currentDevice = defaultDeviceIndex;
deviceView.setOnItemClickListener(
new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
updateActiveModel();
}
});
deviceView.setOnItemClickListener((parent, view, position, id) -> updateActiveModel());
bottomSheetLayout = findViewById(R.id.bottom_sheet_layout);
gestureLayout = findViewById(R.id.gesture_layout);
......@@ -153,23 +136,15 @@ public abstract class CameraActivity extends AppCompatActivity
modelStrings = getModelStrings(getAssets(), ASSET_PATH);
modelView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
ArrayAdapter<String> modelAdapter =
new ArrayAdapter<>(
CameraActivity.this , R.layout.listview_row, R.id.listview_row_text, modelStrings);
ArrayAdapter<String> modelAdapter = new ArrayAdapter<>(this, R.layout.listview_row,
R.id.listview_row_text, modelStrings);
modelView.setAdapter(modelAdapter);
modelView.setItemChecked(defaultModelIndex, true);
currentModel = defaultModelIndex;
modelView.setOnItemClickListener(
new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
updateActiveModel();
}
});
modelView.setOnItemClickListener((parent, view, position, id) -> updateActiveModel());
ViewTreeObserver vto = gestureLayout.getViewTreeObserver();
vto.addOnGlobalLayoutListener(
new ViewTreeObserver.OnGlobalLayoutListener() {
vto.addOnGlobalLayoutListener(new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
......@@ -177,7 +152,6 @@ public abstract class CameraActivity extends AppCompatActivity
} else {
gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this);
}
// int width = bottomSheetLayout.getMeasuredWidth();
int height = gestureLayout.getMeasuredHeight();
sheetBehavior.setPeekHeight(height);
......@@ -185,20 +159,17 @@ public abstract class CameraActivity extends AppCompatActivity
});
sheetBehavior.setHideable(false);
sheetBehavior.setBottomSheetCallback(
new BottomSheetBehavior.BottomSheetCallback() {
sheetBehavior.setBottomSheetCallback(new BottomSheetBehavior.BottomSheetCallback() {
@Override
public void onStateChanged(@NonNull View bottomSheet, int newState) {
switch (newState) {
case BottomSheetBehavior.STATE_HIDDEN:
break;
case BottomSheetBehavior.STATE_EXPANDED:
{
case BottomSheetBehavior.STATE_EXPANDED: {
bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_down);
}
break;
case BottomSheetBehavior.STATE_COLLAPSED:
{
case BottomSheetBehavior.STATE_COLLAPSED: {
bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
}
break;
......@@ -211,7 +182,8 @@ public abstract class CameraActivity extends AppCompatActivity
}
@Override
public void onSlide(@NonNull View bottomSheet, float slideOffset) {}
public void onSlide(@NonNull View bottomSheet, float slideOffset) {
}
});
frameValueTextView = findViewById(R.id.frame_info);
......@@ -222,10 +194,8 @@ public abstract class CameraActivity extends AppCompatActivity
minusImageView.setOnClickListener(this);
}
protected ArrayList<String> getModelStrings(AssetManager mgr, String path){
ArrayList<String> res = new ArrayList<String>();
protected ArrayList<String> getModelStrings(AssetManager mgr, String path) {
ArrayList<String> res = new ArrayList<>();
try {
String[] files = mgr.list(path);
for (String file : files) {
......@@ -235,8 +205,7 @@ public abstract class CameraActivity extends AppCompatActivity
}
}
}
catch (IOException e){
} catch (IOException e) {
System.err.println("getModelStrings: " + e.getMessage());
}
return res;
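For illustration, the call site from onCreate() with a hypothetical result (the model file names are assumed, not taken from this commit):
ArrayList<String> models = getModelStrings(getAssets(), ASSET_PATH);
// e.g. ["yolov5s.tflite", "yolov5s-int8.tflite"] -- whichever model files sit under
// ASSET_PATH in assets/; these strings back the modelView adapter built in onCreate().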
......@@ -247,14 +216,6 @@ public abstract class CameraActivity extends AppCompatActivity
return rgbBytes;
}
protected int getLuminanceStride() {
return yRowStride;
}
protected byte[] getLuminance() {
return yuvBytes[0];
}
/** Callback for android.hardware.Camera API */
@Override
public void onPreviewFrame(final byte[] bytes, final Camera camera) {
......@@ -281,16 +242,9 @@ public abstract class CameraActivity extends AppCompatActivity
yuvBytes[0] = bytes;
yRowStride = previewWidth;
imageConverter =
new Runnable() {
@Override
public void run() {
ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
}
};
imageConverter = () -> ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
postInferenceCallback =
new Runnable() {
postInferenceCallback = new Runnable() {
@Override
public void run() {
camera.addCallbackBuffer(bytes);
......@@ -329,25 +283,10 @@ public abstract class CameraActivity extends AppCompatActivity
final int uvRowStride = planes[1].getRowStride();
final int uvPixelStride = planes[1].getPixelStride();
imageConverter =
new Runnable() {
@Override
public void run() {
ImageUtils.convertYUV420ToARGB8888(
yuvBytes[0],
yuvBytes[1],
yuvBytes[2],
previewWidth,
previewHeight,
yRowStride,
uvRowStride,
uvPixelStride,
rgbBytes);
}
};
imageConverter = () -> ImageUtils.convertYUV420ToARGB8888(yuvBytes[0], yuvBytes[1], yuvBytes[2], previewWidth,
previewHeight, yRowStride, uvRowStride, uvPixelStride, rgbBytes);
postInferenceCallback =
new Runnable() {
postInferenceCallback = new Runnable() {
@Override
public void run() {
image.close();
......@@ -415,8 +354,8 @@ public abstract class CameraActivity extends AppCompatActivity
}
@Override
public void onRequestPermissionsResult(
final int requestCode, final String[] permissions, final int[] grantResults) {
public void onRequestPermissionsResult(final int requestCode, final String[] permissions,
final int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == PERMISSIONS_REQUEST) {
if (allPermissionsGranted(grantResults)) {
......@@ -447,19 +386,15 @@ public abstract class CameraActivity extends AppCompatActivity
private void requestPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) {
Toast.makeText(
CameraActivity.this,
"Camera permission is required for this demo",
Toast.LENGTH_LONG)
Toast.makeText(CameraActivity.this, "Camera permission is required for this demo", Toast.LENGTH_LONG)
.show();
}
requestPermissions(new String[] {PERMISSION_CAMERA}, PERMISSIONS_REQUEST);
requestPermissions(new String[] { PERMISSION_CAMERA }, PERMISSIONS_REQUEST);
}
}
// Returns true if the device supports the required hardware level, or better.
private boolean isHardwareLevelSupported(
CameraCharacteristics characteristics, int requiredLevel) {
private boolean isHardwareLevelSupported(CameraCharacteristics characteristics, int requiredLevel) {
int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
return requiredLevel == deviceLevel;
......@@ -480,8 +415,8 @@ public abstract class CameraActivity extends AppCompatActivity
continue;
}
final StreamConfigurationMap map =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
final StreamConfigurationMap map = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
......@@ -490,9 +425,7 @@ public abstract class CameraActivity extends AppCompatActivity
// Fallback to camera1 API for internal cameras that don't have full support.
// This should help with legacy situations where using the camera2 API causes
// distorted or otherwise broken previews.
useCamera2API =
(facing == CameraCharacteristics.LENS_FACING_EXTERNAL)
|| isHardwareLevelSupported(
useCamera2API = (facing == CameraCharacteristics.LENS_FACING_EXTERNAL) || isHardwareLevelSupported(
characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
LOGGER.i("Camera API lv2?: %s", useCamera2API);
return cameraId;
......@@ -509,25 +442,17 @@ public abstract class CameraActivity extends AppCompatActivity
Fragment fragment;
if (useCamera2API) {
CameraConnectionFragment camera2Fragment =
CameraConnectionFragment.newInstance(
new CameraConnectionFragment.ConnectionCallback() {
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
CameraConnectionFragment camera2Fragment = CameraConnectionFragment
.newInstance((size, rotation) -> {
previewHeight = size.getHeight();
previewWidth = size.getWidth();
CameraActivity.this.onPreviewSizeChosen(size, rotation);
}
},
this,
getLayoutId(),
getDesiredPreviewFrameSize());
}, this, getLayoutId(), getDesiredPreviewFrameSize());
camera2Fragment.setCamera(cameraId);
fragment = camera2Fragment;
} else {
fragment =
new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
fragment = new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
}
getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit();
......@@ -569,19 +494,13 @@ public abstract class CameraActivity extends AppCompatActivity
}
}
// @Override
// public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
// setUseNNAPI(isChecked);
// if (isChecked) apiSwitchCompat.setText("NNAPI");
// else apiSwitchCompat.setText("TFLITE");
// }
@Override
public void onClick(View v) {
if (v.getId() == R.id.plus) {
String threads = threadsTextView.getText().toString().trim();
int numThreads = Integer.parseInt(threads);
if (numThreads >= 9) return;
if (numThreads >= 9)
return;
numThreads++;
threadsTextView.setText(String.valueOf(numThreads));
setNumThreads(numThreads);
......@@ -610,6 +529,7 @@ public abstract class CameraActivity extends AppCompatActivity
}
protected abstract void updateActiveModel();
protected abstract void processImage();
protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
......@@ -619,6 +539,4 @@ public abstract class CameraActivity extends AppCompatActivity
protected abstract Size getDesiredPreviewFrameSize();
protected abstract void setNumThreads(int numThreads);
protected abstract void setUseNNAPI(boolean isChecked);
}
......@@ -23,7 +23,6 @@ import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
......@@ -67,8 +66,8 @@ public class CameraConnectionFragment extends Fragment {
private static final Logger LOGGER = new Logger();
/**
* The camera preview size will be chosen to be the smallest frame by pixel size capable of
* containing a DESIRED_SIZE x DESIRED_SIZE square.
* The camera preview size will be chosen to be the smallest frame by pixel size
* capable of containing a DESIRED_SIZE x DESIRED_SIZE square.
*/
private static final int MINIMUM_PREVIEW_SIZE = 320;
......@@ -84,29 +83,34 @@ public class CameraConnectionFragment extends Fragment {
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
/** A {@link Semaphore} to prevent the app from exiting before closing the camera. */
/**
* A {@link Semaphore} to prevent the app from exiting before closing the
* camera.
*/
private final Semaphore cameraOpenCloseLock = new Semaphore(1);
/** A {@link OnImageAvailableListener} to receive frames as they are available. */
/**
* A {@link OnImageAvailableListener} to receive frames as they are available.
*/
private final OnImageAvailableListener imageListener;
/** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */
/**
* The input size in pixels desired by TensorFlow (width and height of a square
* bitmap).
*/
private final Size inputSize;
/** The layout identifier to inflate for this Fragment. */
private final int layout;
private final ConnectionCallback cameraConnectionCallback;
private final CameraCaptureSession.CaptureCallback captureCallback =
new CameraCaptureSession.CaptureCallback() {
private final CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureProgressed(
final CameraCaptureSession session,
final CaptureRequest request,
final CaptureResult partialResult) {}
public void onCaptureProgressed(final CameraCaptureSession session, final CaptureRequest request,
final CaptureResult partialResult) {
}
@Override
public void onCaptureCompleted(
final CameraCaptureSession session,
final CaptureRequest request,
final TotalCaptureResult result) {}
public void onCaptureCompleted(final CameraCaptureSession session, final CaptureRequest request,
final TotalCaptureResult result) {
}
};
/** ID of the current {@link CameraDevice}. */
private String cameraId;
......@@ -130,12 +134,15 @@ public class CameraConnectionFragment extends Fragment {
private CaptureRequest.Builder previewRequestBuilder;
/** {@link CaptureRequest} generated by {@link #previewRequestBuilder} */
private CaptureRequest previewRequest;
/** {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state. */
private final CameraDevice.StateCallback stateCallback =
new CameraDevice.StateCallback() {
/**
* {@link CameraDevice.StateCallback} is called when {@link CameraDevice}
* changes its state.
*/
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(final CameraDevice cd) {
// This method is called when the camera is opened. We start camera preview here.
// This method is called when the camera is opened. We start camera preview
// here.
cameraOpenCloseLock.release();
cameraDevice = cd;
createCameraPreviewSession();
......@@ -160,20 +167,17 @@ public class CameraConnectionFragment extends Fragment {
}
};
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
* TextureView}.
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events
* on a {@link TextureView}.
*/
private final TextureView.SurfaceTextureListener surfaceTextureListener =
new TextureView.SurfaceTextureListener() {
private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(
final SurfaceTexture texture, final int width, final int height) {
public void onSurfaceTextureAvailable(final SurfaceTexture texture, final int width, final int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(
final SurfaceTexture texture, final int width, final int height) {
public void onSurfaceTextureSizeChanged(final SurfaceTexture texture, final int width, final int height) {
configureTransform(width, height);
}
......@@ -183,14 +187,12 @@ public class CameraConnectionFragment extends Fragment {
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {
}
};
private CameraConnectionFragment(
final ConnectionCallback connectionCallback,
final OnImageAvailableListener imageListener,
final int layout,
final Size inputSize) {
private CameraConnectionFragment(final ConnectionCallback connectionCallback,
final OnImageAvailableListener imageListener, final int layout, final Size inputSize) {
this.cameraConnectionCallback = connectionCallback;
this.imageListener = imageListener;
this.layout = layout;
......@@ -198,10 +200,12 @@ public class CameraConnectionFragment extends Fragment {
}
/**
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are at least as large as the minimum of both, or an exact match if possible.
* Given {@code choices} of {@code Size}s supported by a camera, chooses the
* smallest one whose width and height are at least as large as the minimum of
* both, or an exact match if possible.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param choices The list of sizes that the camera supports for the intended
* output class
* @param width The minimum desired width
* @param height The minimum desired height
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
......@@ -210,13 +214,15 @@ public class CameraConnectionFragment extends Fragment {
final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
final Size desiredSize = new Size(width, height);
// Collect the supported resolutions that are at least as big as the preview Surface
// Collect the supported resolutions that are at least as big as the preview
// Surface
boolean exactSizeFound = false;
final List<Size> bigEnough = new ArrayList<Size>();
final List<Size> tooSmall = new ArrayList<Size>();
for (final Size option : choices) {
if (option.equals(desiredSize)) {
// Set the size but don't return yet so that remaining sizes will still be logged.
// Set the size but don't return yet so that remaining sizes will still be
// logged.
exactSizeFound = true;
}
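For illustration, a minimal sketch of chooseOptimalSize's contract (the candidate sizes are assumed, not from the diff):
final Size[] choices = { new Size(320, 240), new Size(640, 480), new Size(1280, 720) };
final Size chosen = CameraConnectionFragment.chooseOptimalSize(choices, 416, 416);
// minSize = max(min(416, 416), MINIMUM_PREVIEW_SIZE) = 416, so 320x240 lands in
// tooSmall; 640x480 and 1280x720 are both big enough, and the smallest by pixel
// area wins: 640x480.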
......@@ -247,11 +253,8 @@ public class CameraConnectionFragment extends Fragment {
}
}
public static CameraConnectionFragment newInstance(
final ConnectionCallback callback,
final OnImageAvailableListener imageListener,
final int layout,
final Size inputSize) {
public static CameraConnectionFragment newInstance(final ConnectionCallback callback,
final OnImageAvailableListener imageListener, final int layout, final Size inputSize) {
return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
}
......@@ -263,8 +266,7 @@ public class CameraConnectionFragment extends Fragment {
private void showToast(final String text) {
final Activity activity = getActivity();
if (activity != null) {
activity.runOnUiThread(
new Runnable() {
activity.runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
......@@ -274,8 +276,8 @@ public class CameraConnectionFragment extends Fragment {
}
@Override
public View onCreateView(
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
public View onCreateView(final LayoutInflater inflater, final ViewGroup container,
final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false);
}
......@@ -294,9 +296,12 @@ public class CameraConnectionFragment extends Fragment {
super.onResume();
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// When the screen is turned off and turned back on, the SurfaceTexture is
// already
// available, and "onSurfaceTextureAvailable" will not be called. In that case,
// we can open
// a camera and start preview from here (otherwise, we wait until the surface is
// ready in
// the SurfaceTextureListener).
if (textureView.isAvailable()) {
openCamera(textureView.getWidth(), textureView.getHeight());
......@@ -323,18 +328,16 @@ public class CameraConnectionFragment extends Fragment {
try {
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
final StreamConfigurationMap map =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
final StreamConfigurationMap map = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// Danger, W.R.! Attempting to use too large a preview size could exceed the
// camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data.
previewSize =
chooseOptimalSize(
map.getOutputSizes(SurfaceTexture.class),
inputSize.getWidth(),
previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), inputSize.getWidth(),
inputSize.getHeight());
// We fit the aspect ratio of TextureView to the size of preview we picked.
......@@ -347,10 +350,11 @@ public class CameraConnectionFragment extends Fragment {
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
} catch (final NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// Currently an NPE is thrown when the Camera2API is used but not supported on
// the
// device this code runs.
ErrorDialog.newInstance(getString(R.string.tfe_od_camera_error))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
ErrorDialog.newInstance(getString(R.string.tfe_od_camera_error)).show(getChildFragmentManager(),
FRAGMENT_DIALOG);
throw new IllegalStateException(getString(R.string.tfe_od_camera_error));
}
......@@ -423,7 +427,8 @@ public class CameraConnectionFragment extends Fragment {
final SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
// We configure the size of default buffer to be the size of camera preview we
// want.
texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
// This is the output Surface we need to start preview.
......@@ -436,16 +441,14 @@ public class CameraConnectionFragment extends Fragment {
LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());
// Create the reader for the preview frames.
previewReader =
ImageReader.newInstance(
previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
previewReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(),
ImageFormat.YUV_420_888, 2);
previewReader.setOnImageAvailableListener(imageListener, backgroundHandler);
previewRequestBuilder.addTarget(previewReader.getSurface());
// Here, we create a CameraCaptureSession for camera preview.
cameraDevice.createCaptureSession(
Arrays.asList(surface, previewReader.getSurface()),
cameraDevice.createCaptureSession(Arrays.asList(surface, previewReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
......@@ -459,17 +462,15 @@ public class CameraConnectionFragment extends Fragment {
captureSession = cameraCaptureSession;
try {
// Auto focus should be continuous for camera preview.
previewRequestBuilder.set(
CaptureRequest.CONTROL_AF_MODE,
previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// Flash is automatically enabled when necessary.
previewRequestBuilder.set(
CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
previewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
// Finally, we start displaying the camera preview.
previewRequest = previewRequestBuilder.build();
captureSession.setRepeatingRequest(
previewRequest, captureCallback, backgroundHandler);
captureSession.setRepeatingRequest(previewRequest, captureCallback, backgroundHandler);
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
}
......@@ -479,17 +480,16 @@ public class CameraConnectionFragment extends Fragment {
public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
showToast("Failed");
}
},
null);
}, null);
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
}
}
/**
* Configures the necessary {@link Matrix} transformation to `mTextureView`. This method should be
* called after the camera preview size is determined in setUpCameraOutputs and also the size of
* `mTextureView` is fixed.
* Configures the necessary {@link Matrix} transformation to `mTextureView`.
* This method should be called after the camera preview size is determined in
* setUpCameraOutputs and also the size of `mTextureView` is fixed.
*
* @param viewWidth The width of `mTextureView`
* @param viewHeight The height of `mTextureView`
......@@ -508,9 +508,7 @@ public class CameraConnectionFragment extends Fragment {
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
final float scale =
Math.max(
(float) viewHeight / previewSize.getHeight(),
final float scale = Math.max((float) viewHeight / previewSize.getHeight(),
(float) viewWidth / previewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
......@@ -521,8 +519,8 @@ public class CameraConnectionFragment extends Fragment {
}
/**
* Callback for Activities to use to initialize their data once the selected preview size is
* known.
* Callback for Activities to use to initialize their data once the selected
* preview size is known.
*/
public interface ConnectionCallback {
void onPreviewSizeChosen(Size size, int cameraRotation);
......@@ -533,8 +531,7 @@ public class CameraConnectionFragment extends Fragment {
@Override
public int compare(final Size lhs, final Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum(
(long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
}
}
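// A brief illustration of why the widening cast matters (numbers assumed):
// 8192 * 8192 = 67,108,864 still fits in an int, but 65536 * 65536 = 4,294,967,296
// exceeds Integer.MAX_VALUE (2,147,483,647); multiplying as long keeps each area
// exact, and Long.signum(...) collapses the difference to -1, 0, or 1.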
......@@ -553,17 +550,8 @@ public class CameraConnectionFragment extends Fragment {
@Override
public Dialog onCreateDialog(final Bundle savedInstanceState) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity)
.setMessage(getArguments().getString(ARG_MESSAGE))
.setPositiveButton(
android.R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialogInterface, final int i) {
activity.finish();
}
})
.create();
return new AlertDialog.Builder(activity).setMessage(getArguments().getString(ARG_MESSAGE))
.setPositiveButton(android.R.string.ok, (dialogInterface, i) -> activity.finish()).create();
}
}
}
......@@ -56,14 +56,12 @@ public class LegacyCameraConnectionFragment extends Fragment {
/** An {@link AutoFitTextureView} for camera preview. */
private AutoFitTextureView textureView;
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
* TextureView}.
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events
* on a {@link TextureView}.
*/
private final TextureView.SurfaceTextureListener surfaceTextureListener =
new TextureView.SurfaceTextureListener() {
private final TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(
final SurfaceTexture texture, final int width, final int height) {
public void onSurfaceTextureAvailable(final SurfaceTexture texture, final int width, final int height) {
int index = getCameraId();
camera = Camera.open(index);
......@@ -71,8 +69,7 @@ public class LegacyCameraConnectionFragment extends Fragment {
try {
Camera.Parameters parameters = camera.getParameters();
List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes != null
&& focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
if (focusModes != null && focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
......@@ -81,9 +78,8 @@ public class LegacyCameraConnectionFragment extends Fragment {
for (Camera.Size size : cameraSizes) {
sizes[i++] = new Size(size.width, size.height);
}
Size previewSize =
CameraConnectionFragment.chooseOptimalSize(
sizes, desiredSize.getWidth(), desiredSize.getHeight());
Size previewSize = CameraConnectionFragment.chooseOptimalSize(sizes, desiredSize.getWidth(),
desiredSize.getHeight());
parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
camera.setDisplayOrientation(90);
camera.setParameters(parameters);
......@@ -102,8 +98,8 @@ public class LegacyCameraConnectionFragment extends Fragment {
}
@Override
public void onSurfaceTextureSizeChanged(
final SurfaceTexture texture, final int width, final int height) {}
public void onSurfaceTextureSizeChanged(final SurfaceTexture texture, final int width, final int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
......@@ -111,21 +107,22 @@ public class LegacyCameraConnectionFragment extends Fragment {
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {
}
};
/** An additional thread for running tasks that shouldn't block the UI. */
private HandlerThread backgroundThread;
public LegacyCameraConnectionFragment(
final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
public LegacyCameraConnectionFragment(final Camera.PreviewCallback imageListener, final int layout,
final Size desiredSize) {
this.imageListener = imageListener;
this.layout = layout;
this.desiredSize = desiredSize;
}
@Override
public View onCreateView(
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
public View onCreateView(final LayoutInflater inflater, final ViewGroup container,
final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false);
}
......@@ -143,9 +140,12 @@ public class LegacyCameraConnectionFragment extends Fragment {
public void onResume() {
super.onResume();
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// When the screen is turned off and turned back on, the SurfaceTexture is
// already
// available, and "onSurfaceTextureAvailable" will not be called. In that case,
// we can open
// a camera and start preview from here (otherwise, we wait until the surface is
// ready in
// the SurfaceTextureListener).
if (textureView.isAvailable()) {
......@@ -192,7 +192,8 @@ public class LegacyCameraConnectionFragment extends Fragment {
CameraInfo ci = new CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, ci);
if (ci.facing == CameraInfo.CAMERA_FACING_BACK) return i;
if (ci.facing == CameraInfo.CAMERA_FACING_BACK)
return i;
}
return -1; // No camera found
}
......
......@@ -47,8 +47,8 @@ import com.agenew.detection.tflite.YoloV5Classifier;
import com.agenew.detection.tracking.MultiBoxTracker;
/**
* An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to detect and then track
* objects.
* An activity that uses a TensorFlowMultiBoxDetector and ObjectTracker to
* detect and then track objects.
*/
public class MainActivity extends CameraActivity implements OnImageAvailableListener {
private static final Logger LOGGER = new Logger();
......@@ -82,9 +82,8 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
final float textSizePx =
TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
final float textSizePx = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP,
getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx);
borderedText.setTypeface(Typeface.MONOSPACE);
......@@ -100,9 +99,8 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
} catch (final IOException e) {
e.printStackTrace();
LOGGER.e(e, "Exception initializing classifier!");
Toast toast =
Toast.makeText(
getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
Toast toast = Toast.makeText(getApplicationContext(), "Classifier could not be initialized",
Toast.LENGTH_SHORT);
toast.show();
finish();
}
......@@ -125,18 +123,14 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
frameToCropTransform =
ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
cropSize, cropSize,
frameToCropTransform = ImageUtils.getTransformationMatrix(previewWidth, previewHeight, cropSize, cropSize,
sensorOrientation, MAINTAIN_ASPECT);
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay);
trackingOverlay.addCallback(
new DrawCallback() {
trackingOverlay.addCallback(new DrawCallback() {
@Override
public void drawCallback(final Canvas canvas) {
tracker.draw(canvas);
......@@ -157,8 +151,7 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
final int numThreads = Integer.parseInt(threads);
handler.post(() -> {
if (modelIndex == currentModel && deviceIndex == currentDevice
&& numThreads == currentNumThreads) {
if (modelIndex == currentModel && deviceIndex == currentDevice && numThreads == currentNumThreads) {
return;
}
currentModel = modelIndex;
......@@ -185,18 +178,15 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
if (detector == null) {
return;
}
}
catch(IOException e) {
} catch (IOException e) {
e.printStackTrace();
LOGGER.e(e, "Exception in updateActiveModel()");
Toast toast =
Toast.makeText(
getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
Toast toast = Toast.makeText(getApplicationContext(), "Classifier could not be initialized",
Toast.LENGTH_SHORT);
toast.show();
finish();
}
if (device.equals("CPU")) {
detector.useCPU();
} else if (device.equals("GPU")) {
......@@ -209,11 +199,8 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
int cropSize = detector.getInputSize();
croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
frameToCropTransform =
ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
cropSize, cropSize,
sensorOrientation, MAINTAIN_ASPECT);
frameToCropTransform = ImageUtils.getTransformationMatrix(previewWidth, previewHeight, cropSize,
cropSize, sensorOrientation, MAINTAIN_ASPECT);
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
......@@ -245,8 +232,7 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
ImageUtils.saveBitmap(croppedBitmap);
}
runInBackground(
new Runnable() {
runInBackground(new Runnable() {
@Override
public void run() {
LOGGER.i("Running detection on image " + currTimestamp);
......@@ -270,8 +256,7 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
break;
}
final List<Classifier.Recognition> mappedRecognitions =
new LinkedList<Classifier.Recognition>();
final List<Classifier.Recognition> mappedRecognitions = new LinkedList<>();
for (final Classifier.Recognition result : results) {
final RectF location = result.getLocation();
......@@ -290,14 +275,10 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
computingDetection = false;
runOnUiThread(
new Runnable() {
@Override
public void run() {
runOnUiThread(() -> {
showFrameInfo(previewWidth + "x" + previewHeight);
showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight());
showInference(lastProcessingTimeMs + "ms");
}
});
}
});
......@@ -313,18 +294,14 @@ public class MainActivity extends CameraActivity implements OnImageAvailableList
return DESIRED_PREVIEW_SIZE;
}
// Which detection model to use: by default uses Tensorflow Object Detection API frozen
// Which detection model to use: by default uses Tensorflow Object Detection API
// frozen
// checkpoints.
private enum DetectorMode {
TF_OD_API;
}
@Override
protected void setUseNNAPI(final boolean isChecked) {
runInBackground(() -> detector.setUseNNAPI(isChecked));
}
@Override
protected void setNumThreads(final int numThreads) {
runInBackground(() -> detector.setNumThreads(numThreads));
}
......
......@@ -38,9 +38,10 @@ public class AutoFitTextureView extends TextureView {
}
/**
* Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
* calculated from the parameters. Note that the actual sizes of parameters don't matter, that is,
* calling setAspectRatio(2, 3) and setAspectRatio(4, 6) make the same result.
* Sets the aspect ratio for this view. The size of the view will be measured
* based on the ratio calculated from the parameters. Note that the actual sizes
* of parameters don't matter, that is, calling setAspectRatio(2, 3) and
* setAspectRatio(4, 6) make the same result.
*
* @param width Relative horizontal size
* @param height Relative vertical size
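For example (view instance assumed): textureView.setAspectRatio(4, 3) and textureView.setAspectRatio(8, 6) measure the view to the same 4:3 shape; only the ratio of the two arguments is significant.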
......
......@@ -18,22 +18,20 @@ package com.agenew.detection.env;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.graphics.Typeface;
import java.util.Vector;
/** A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas. */
/**
* A class that encapsulates the tedious bits of rendering legible, bordered
* text onto a canvas.
*/
public class BorderedText {
private final Paint interiorPaint;
private final Paint exteriorPaint;
private final float textSize;
/**
* Creates a left-aligned bordered text object with a white interior, and a black exterior with
* the specified text size.
* Creates a left-aligned bordered text object with a white interior, and a
* black exterior with the specified text size.
*
* @param textSize text size in pixels
*/
......@@ -42,8 +40,8 @@ public class BorderedText {
}
/**
* Create a bordered text object with the specified interior and exterior colors, text size and
* alignment.
* Create a bordered text object with the specified interior and exterior
* colors, text size and alignment.
*
* @param interiorColor the interior text color
* @param exteriorColor the exterior text color
......@@ -64,8 +62,6 @@ public class BorderedText {
exteriorPaint.setStrokeWidth(textSize / 8);
exteriorPaint.setAntiAlias(false);
exteriorPaint.setAlpha(255);
this.textSize = textSize;
}
public void setTypeface(Typeface typeface) {
......@@ -78,8 +74,7 @@ public class BorderedText {
canvas.drawText(text, posX, posY, interiorPaint);
}
public void drawText(
final Canvas canvas, final float posX, final float posY, final String text, Paint bgPaint) {
public void drawText(final Canvas canvas, final float posX, final float posY, final String text, Paint bgPaint) {
float width = exteriorPaint.measureText(text);
float textSize = exteriorPaint.getTextSize();
......@@ -90,39 +85,4 @@ public class BorderedText {
canvas.drawText(text, posX, (posY + textSize), interiorPaint);
}
public void drawLines(Canvas canvas, final float posX, final float posY, Vector<String> lines) {
int lineNum = 0;
for (final String line : lines) {
drawText(canvas, posX, posY - getTextSize() * (lines.size() - lineNum - 1), line);
++lineNum;
}
}
public void setInteriorColor(final int color) {
interiorPaint.setColor(color);
}
public void setExteriorColor(final int color) {
exteriorPaint.setColor(color);
}
public float getTextSize() {
return textSize;
}
public void setAlpha(final int alpha) {
interiorPaint.setAlpha(alpha);
exteriorPaint.setAlpha(alpha);
}
public void getTextBounds(
final String line, final int index, final int count, final Rect lineBounds) {
interiorPaint.getTextBounds(line, index, count, lineBounds);
}
public void setTextAlign(final Align align) {
interiorPaint.setTextAlign(align);
exteriorPaint.setTextAlign(align);
}
}
......@@ -23,22 +23,22 @@ import java.io.FileOutputStream;
/** Utility class for manipulating images. */
public class ImageUtils {
// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their
// ranges
// are normalized to eight bits.
static final int kMaxChannelValue = 262143;
@SuppressWarnings("unused")
private static final Logger LOGGER = new Logger();
/**
* Utility method to compute the allocated size in bytes of a YUV420SP image of the given
* dimensions.
* Utility method to compute the allocated size in bytes of a YUV420SP image of
* the given dimensions.
*/
public static int getYUVByteSize(final int width, final int height) {
// The luminance plane requires 1 byte per pixel.
final int ySize = width * height;
// The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up.
// The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded
// up.
// Each 2x2 block takes 2 bytes to encode, one each for U and V.
final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;
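// Worked example (illustrative): getYUVByteSize(640, 480) gives
//   ySize  = 640 * 480 = 307,200 bytes
//   uvSize = ((640 + 1) / 2) * ((480 + 1) / 2) * 2 = 320 * 240 * 2 = 153,600 bytes
// for a total of 460,800 bytes; the +1 rounding only changes the result for odd
// dimensions.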
......@@ -61,8 +61,7 @@ public class ImageUtils {
* @param filename The location to save the bitmap to.
*/
public static void saveBitmap(final Bitmap bitmap, final String filename) {
final String root =
Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
final String root = Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root);
final File myDir = new File(root);
......@@ -128,16 +127,8 @@ public class ImageUtils {
return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
}
public static void convertYUV420ToARGB8888(
byte[] yData,
byte[] uData,
byte[] vData,
int width,
int height,
int yRowStride,
int uvRowStride,
int uvPixelStride,
int[] out) {
public static void convertYUV420ToARGB8888(byte[] yData, byte[] uData, byte[] vData, int width, int height,
int yRowStride, int uvRowStride, int uvPixelStride, int[] out) {
int yp = 0;
for (int j = 0; j < height; j++) {
int pY = yRowStride * j;
......@@ -152,26 +143,21 @@ public class ImageUtils {
}
/**
* Returns a transformation matrix from one reference frame into another. Handles cropping (if
* maintaining aspect ratio is desired) and rotation.
* Returns a transformation matrix from one reference frame into another.
* Handles cropping (if maintaining aspect ratio is desired) and rotation.
*
* @param srcWidth Width of source frame.
* @param srcHeight Height of source frame.
* @param dstWidth Width of destination frame.
* @param dstHeight Height of destination frame.
* @param applyRotation Amount of rotation to apply from one frame to another. Must be a multiple
* of 90.
* @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
* cropping the image if necessary.
* @param applyRotation Amount of rotation to apply from one frame to
* another. Must be a multiple of 90.
* @param maintainAspectRatio If true, will ensure that scaling in x and y
* remains constant, cropping the image if necessary.
* @return The transformation fulfilling the desired requirements.
*/
public static Matrix getTransformationMatrix(
final int srcWidth,
final int srcHeight,
final int dstWidth,
final int dstHeight,
final int applyRotation,
final boolean maintainAspectRatio) {
public static Matrix getTransformationMatrix(final int srcWidth, final int srcHeight, final int dstWidth,
final int dstHeight, final int applyRotation, final boolean maintainAspectRatio) {
final Matrix matrix = new Matrix();
if (applyRotation != 0) {
......
......@@ -19,7 +19,10 @@ import android.util.Log;
import java.util.HashSet;
import java.util.Set;
/** Wrapper for the platform log function, allows convenient message prefixing and log disabling. */
/**
* Wrapper for the platform log function, allows convenient message prefixing
* and log disabling.
*/
public final class Logger {
private static final String DEFAULT_TAG = "tensorflow";
private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG;
......@@ -39,33 +42,18 @@ public final class Logger {
private int minLogLevel = DEFAULT_MIN_LOG_LEVEL;
/**
* Creates a Logger using the class name as the message prefix.
* Creates a Logger with a custom tag and a custom message prefix. If the
* message prefix is set to
*
* @param clazz the simple name of this class is used as the message prefix.
*/
public Logger(final Class<?> clazz) {
this(clazz.getSimpleName());
}
/**
* Creates a Logger using the specified message prefix.
*
* @param messagePrefix is prepended to the text of every message.
*/
public Logger(final String messagePrefix) {
this(DEFAULT_TAG, messagePrefix);
}
/**
* Creates a Logger with a custom tag and a custom message prefix. If the message prefix is set to
*
* <pre>null</pre>
* <pre>
* null
* </pre>
*
* , the caller's class name is used as the prefix.
*
* @param tag identifies the source of a log message.
* @param messagePrefix prepended to every message if non-null. If null, the name of the caller is
* being used
* @param messagePrefix prepended to every message if non-null. If null, the
* name of the caller is being used
*/
public Logger(final String tag, final String messagePrefix) {
this.tag = tag;
......@@ -78,21 +66,17 @@ public final class Logger {
this(DEFAULT_TAG, null);
}
/** Creates a Logger using the caller's class name as the message prefix. */
public Logger(final int minLogLevel) {
this(DEFAULT_TAG, null);
this.minLogLevel = minLogLevel;
}
/**
* Return caller's simple name.
*
* <p>Android getStackTrace() returns an array that looks like this: stackTrace[0]:
* <p>
* Android getStackTrace() returns an array that looks like this: stackTrace[0]:
* dalvik.system.VMStack stackTrace[1]: java.lang.Thread stackTrace[2]:
* com.google.android.apps.unveil.env.UnveilLogger stackTrace[3]:
* com.google.android.apps.unveil.BaseApplication
*
* <p>This function returns the simple version of the first non-filtered name.
* <p>
* This function returns the simple version of the first non-filtered name.
*
* @return caller's simple name
*/
......@@ -103,7 +87,8 @@ public final class Logger {
for (final StackTraceElement elem : stackTrace) {
final String className = elem.getClassName();
if (!IGNORED_CLASS_NAMES.contains(className)) {
// We're only interested in the simple name of the class, not the complete package.
// We're only interested in the simple name of the class, not the complete
// package.
final String[] classParts = className.split("\\.");
return classParts[classParts.length - 1];
}
......@@ -112,10 +97,6 @@ public final class Logger {
return Logger.class.getSimpleName();
}
public void setMinLogLevel(final int minLogLevel) {
this.minLogLevel = minLogLevel;
}
public boolean isLoggable(final int logLevel) {
return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel);
}
......@@ -130,24 +111,12 @@ public final class Logger {
}
}
public void v(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.VERBOSE)) {
Log.v(tag, toMessage(format, args), t);
}
}
public void d(final String format, final Object... args) {
if (isLoggable(Log.DEBUG)) {
Log.d(tag, toMessage(format, args));
}
}
public void d(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.DEBUG)) {
Log.d(tag, toMessage(format, args), t);
}
}
public void i(final String format, final Object... args) {
if (isLoggable(Log.INFO)) {
Log.i(tag, toMessage(format, args));
......@@ -166,12 +135,6 @@ public final class Logger {
}
}
public void w(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.WARN)) {
Log.w(tag, toMessage(format, args), t);
}
}
public void e(final String format, final Object... args) {
if (isLoggable(Log.ERROR)) {
Log.e(tag, toMessage(format, args));
......
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package com.agenew.detection.env;
import android.graphics.Bitmap;
import android.text.TextUtils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/** Size class independent of a Camera object. */
public class Size implements Comparable<Size>, Serializable {
// 1.4 went out with this UID so we'll need to maintain it to preserve pending queries when
// upgrading.
public static final long serialVersionUID = 7689808733290872361L;
public final int width;
public final int height;
public Size(final int width, final int height) {
this.width = width;
this.height = height;
}
public Size(final Bitmap bmp) {
this.width = bmp.getWidth();
this.height = bmp.getHeight();
}
/**
* Rotate a size by the given number of degrees.
*
* @param size Size to rotate.
* @param rotation Degrees {0, 90, 180, 270} to rotate the size.
* @return Rotated size.
*/
public static Size getRotatedSize(final Size size, final int rotation) {
if (rotation % 180 != 0) {
// The phone is portrait, therefore the camera is sideways and frame should be rotated.
return new Size(size.height, size.width);
}
return size;
}
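// Worked example (illustrative): getRotatedSize(new Size(640, 480), 90) -> 480x640,
// while a rotation of 0 or 180 returns the size unchanged.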
public static Size parseFromString(String sizeString) {
if (TextUtils.isEmpty(sizeString)) {
return null;
}
sizeString = sizeString.trim();
// The expected format is "<width>x<height>".
final String[] components = sizeString.split("x");
if (components.length == 2) {
try {
final int width = Integer.parseInt(components[0]);
final int height = Integer.parseInt(components[1]);
return new Size(width, height);
} catch (final NumberFormatException e) {
return null;
}
} else {
return null;
}
}
public static List<Size> sizeStringToList(final String sizes) {
final List<Size> sizeList = new ArrayList<Size>();
if (sizes != null) {
final String[] pairs = sizes.split(",");
for (final String pair : pairs) {
final Size size = Size.parseFromString(pair);
if (size != null) {
sizeList.add(size);
}
}
}
return sizeList;
}
public static String sizeListToString(final List<Size> sizes) {
String sizesString = "";
if (sizes != null && sizes.size() > 0) {
sizesString = sizes.get(0).toString();
for (int i = 1; i < sizes.size(); i++) {
sizesString += "," + sizes.get(i).toString();
}
}
return sizesString;
}
public static final String dimensionsAsString(final int width, final int height) {
return width + "x" + height;
}
public final float aspectRatio() {
return (float) width / (float) height;
}
@Override
public int compareTo(final Size other) {
return width * height - other.width * other.height;
}
@Override
public boolean equals(final Object other) {
if (other == null) {
return false;
}
if (!(other instanceof Size)) {
return false;
}
final Size otherSize = (Size) other;
return (width == otherSize.width && height == otherSize.height);
}
@Override
public int hashCode() {
return width * 32713 + height;
}
@Override
public String toString() {
return dimensionsAsString(width, height);
}
}
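For illustration, how the string helpers of this Size class compose (the inputs are assumed, not from this commit):
final Size single = Size.parseFromString("640x480");               // -> 640x480
final List<Size> list = Size.sizeStringToList("320x240,640x480,junk");
// "junk" has no 'x' separator, so parseFromString returns null and the pair is
// skipped: list holds [320x240, 640x480], and sizeListToString(list) round-trips
// back to "320x240,640x480".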
package com.agenew.detection.env;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.os.Environment;
import android.util.Log;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
......@@ -63,149 +54,4 @@ public class Utils {
return result;
}
public static void softmax(final float[] vals) {
float max = Float.NEGATIVE_INFINITY;
for (final float val : vals) {
max = Math.max(max, val);
}
float sum = 0.0f;
for (int i = 0; i < vals.length; ++i) {
vals[i] = (float) Math.exp(vals[i] - max);
sum += vals[i];
}
for (int i = 0; i < vals.length; ++i) {
vals[i] = vals[i] / sum;
}
}
public static float expit(final float x) {
return (float) (1. / (1. + Math.exp(-x)));
}
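For illustration, the numeric behavior of these two helpers (inputs assumed):
final float[] logits = { 2.0f, 1.0f, 0.1f };
Utils.softmax(logits);
// logits is now roughly { 0.66f, 0.24f, 0.10f } and sums to 1; subtracting the max
// before exponentiating guards against overflow for large scores.
final float mid = Utils.expit(0f);   // 0.5f, the logistic sigmoid's midpoint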
// public static Bitmap scale(Context context, String filePath) {
// AssetManager assetManager = context.getAssets();
//
// InputStream istr;
// Bitmap bitmap = null;
// try {
// istr = assetManager.open(filePath);
// bitmap = BitmapFactory.decodeStream(istr);
// bitmap = Bitmap.createScaledBitmap(bitmap, MainActivity.TF_OD_API_INPUT_SIZE, MainActivity.TF_OD_API_INPUT_SIZE, false);
// } catch (IOException e) {
// // handle exception
// Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
// }
//
// return bitmap;
// }
public static Bitmap getBitmapFromAsset(Context context, String filePath) {
AssetManager assetManager = context.getAssets();
Bitmap bitmap = null;
// try-with-resources closes the asset stream, which the original version leaked
try (InputStream istr = assetManager.open(filePath)) {
bitmap = BitmapFactory.decodeStream(istr);
// return bitmap.copy(Bitmap.Config.ARGB_8888,true);
} catch (IOException e) {
Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
}
return bitmap;
}
/**
* Returns a transformation matrix from one reference frame into another.
* Handles cropping (if maintaining aspect ratio is desired) and rotation.
*
* @param srcWidth Width of source frame.
* @param srcHeight Height of source frame.
* @param dstWidth Width of destination frame.
* @param dstHeight Height of destination frame.
* @param applyRotation Amount of rotation to apply from one frame to
* another. Must be a multiple of 90.
* @param maintainAspectRatio If true, will ensure that scaling in x and y
* remains constant, cropping the image if necessary.
* @return The transformation fulfilling the desired requirements.
*/
public static Matrix getTransformationMatrix(final int srcWidth, final int srcHeight, final int dstWidth,
final int dstHeight, final int applyRotation, final boolean maintainAspectRatio) {
final Matrix matrix = new Matrix();
if (applyRotation != 0) {
// Translate so center of image is at origin.
matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
// Rotate around origin.
matrix.postRotate(applyRotation);
}
// Account for the already applied rotation, if any, and then determine how
// much scaling is needed for each axis.
final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
final int inWidth = transpose ? srcHeight : srcWidth;
final int inHeight = transpose ? srcWidth : srcHeight;
// Apply scaling if necessary.
if (inWidth != dstWidth || inHeight != dstHeight) {
final float scaleFactorX = dstWidth / (float) inWidth;
final float scaleFactorY = dstHeight / (float) inHeight;
if (maintainAspectRatio) {
// Scale by the larger factor so that dst is filled completely while
// maintaining the aspect ratio; some of the image may fall off the edge.
final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
matrix.postScale(scaleFactor, scaleFactor);
} else {
// Scale exactly to fill dst from src.
matrix.postScale(scaleFactorX, scaleFactorY);
}
}
if (applyRotation != 0) {
// Translate back from origin centered reference to destination frame.
matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
}
return matrix;
}
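// Example (added for illustration, not part of the original class): build the
// frame-to-crop matrix for a 640x480 preview rotated 90 degrees into a 320x320
// model input, then invert it to map detection boxes back onto the preview.
public static Matrix transformationDemo() {
Matrix frameToCrop = getTransformationMatrix(640, 480, 320, 320, 90, false);
Matrix cropToFrame = new Matrix();
frameToCrop.invert(cropToFrame); // model-space boxes -> preview-space boxes
return cropToFrame;
}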
public static Bitmap processBitmap(Bitmap source, int size) {
int imageHeight = source.getHeight();
int imageWidth = source.getWidth();
Bitmap croppedBitmap = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888);
Matrix frameToCropTransformations = getTransformationMatrix(imageWidth, imageHeight, size, size, 0, false);
Matrix cropToFrameTransformations = new Matrix();
frameToCropTransformations.invert(cropToFrameTransformations); // inverse is computed but unused here
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(source, frameToCropTransformations, null);
return croppedBitmap;
}
public static void writeToFile(String data, Context context) {
try {
String baseDir = Environment.getExternalStorageDirectory().getAbsolutePath();
String fileName = "myFile.txt";
File file = new File(baseDir + File.separator + fileName);
FileOutputStream stream = new FileOutputStream(file);
try {
stream.write(data.getBytes());
} finally {
stream.close();
}
} catch (IOException e) {
Log.e("Exception", "File write failed: " + e.toString());
}
}
}
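The writeToFile helper above writes to the shared external-storage root, which requires the legacy WRITE_EXTERNAL_STORAGE permission and is blocked by scoped storage on Android 10 and later. A minimal sketch of a safer variant using the app-specific directory, which needs no runtime permission (the file name is illustrative):

public static void writeToAppFile(String data, Context context) {
// App-specific external dir: removed with the app, no permission required.
File file = new File(context.getExternalFilesDir(null), "myFile.txt");
try (FileOutputStream stream = new FileOutputStream(file)) {
stream.write(data.getBytes());
} catch (IOException e) {
Log.e("Exception", "File write failed: " + e);
}
}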
......@@ -34,8 +34,6 @@ public interface Classifier {
void setNumThreads(int numThreads);
void setUseNNAPI(boolean isChecked);
float getObjThresh();
/**
......
......@@ -5,30 +5,19 @@ import android.content.res.AssetManager;
import java.io.IOException;
public class DetectorFactory {
public static YoloV5Classifier getDetector(
final AssetManager assetManager,
final String modelFilename)
public static YoloV5Classifier getDetector(final AssetManager assetManager, final String modelFilename)
throws IOException {
String labelFilename = null;
boolean isQuantized = false;
int inputSize = 0;
int[] output_width = new int[]{0};
int[][] masks = new int[][]{{0}};
int[] anchors = new int[]{0};
if (modelFilename.endsWith(".tflite")) {
labelFilename = "file:///android_asset/class.txt";
isQuantized = modelFilename.endsWith("-int8.tflite");
inputSize = 640;
output_width = new int[]{80, 40, 20};
masks = new int[][]{{0, 1, 2}, {3, 4, 5}, {6, 7, 8}};
anchors = new int[]{
10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
};
}
return YoloV5Classifier.create(assetManager, modelFilename, labelFilename, isQuantized,
inputSize);
return YoloV5Classifier.create(assetManager, modelFilename, labelFilename, isQuantized, inputSize);
}
}
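A minimal call-site sketch for the factory (the asset name below is an assumption; the app actually enumerates its models at runtime):

// From code that can reach an AssetManager, e.g. inside an Activity:
try {
YoloV5Classifier detector = DetectorFactory.getDetector(getAssets(), "yolov5s-int8.tflite");
// A "-int8.tflite" suffix selects the quantized path; input size is fixed at 640 above.
} catch (IOException e) {
Log.e("DetectorFactory", "Failed to load model: " + e.getMessage());
}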
......@@ -18,7 +18,6 @@ package com.agenew.detection.tflite;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.RectF;
import android.os.Build;
import android.util.Log;
//import org.tensorflow.lite.Interpreter;
......@@ -45,15 +44,17 @@ import java.util.Map;
import java.util.PriorityQueue;
import java.util.Vector;
/**
* Wrapper for frozen detection models trained using the Tensorflow Object Detection API:
* - https://github.com/tensorflow/models/tree/master/research/object_detection
* Wrapper for frozen detection models trained using the Tensorflow Object
* Detection API: -
* https://github.com/tensorflow/models/tree/master/research/object_detection
* where you can find the training code.
* <p>
* To use pretrained models in the API or convert to TF Lite models, please see docs for details:
* - https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md
* - https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/running_on_mobile_tensorflowlite.md#running-our-model-on-android
* To use pretrained models in the API or convert to TF Lite models, please see
* docs for details: -
* https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md
* -
* https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/running_on_mobile_tensorflowlite.md#running-our-model-on-android
*/
public class YoloV5Classifier implements Classifier {
private static final String TAG = "YoloV5Classifier";
......@@ -66,16 +67,8 @@ public class YoloV5Classifier implements Classifier {
* @param labelFilename The filepath of the label file for classes.
* @param isQuantized   Whether the model is quantized or not
*/
public static YoloV5Classifier create(
final AssetManager assetManager,
final String modelFilename,
final String labelFilename,
final boolean isQuantized,
final int inputSize
/*final int[] output_width,
final int[][] masks,
final int[] anchors*/)
throws IOException {
public static YoloV5Classifier create(final AssetManager assetManager, final String modelFilename,
final String labelFilename, final boolean isQuantized, final int inputSize) throws IOException {
final YoloV5Classifier d = new YoloV5Classifier();
String actualFilename = labelFilename.split("file:///android_asset/")[1];
......@@ -91,27 +84,6 @@ public class YoloV5Classifier implements Classifier {
try {
Interpreter.Options options = (new Interpreter.Options());
options.setNumThreads(NUM_THREADS);
if (isNNAPI) {
d.nnapiDelegate = null;
// Initialize interpreter with NNAPI delegate for Android Pie or above
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
d.nnapiDelegate = new NnApiDelegate();
options.addDelegate(d.nnapiDelegate);
options.setNumThreads(NUM_THREADS);
// options.setUseNNAPI(false);
// options.setAllowFp16PrecisionForFp32(true);
// options.setAllowBufferHandleOutput(true);
options.setUseNNAPI(true);
}
}
/*
if (isGPU) {
GpuDelegate.Options gpu_options = new GpuDelegate.Options();
gpu_options.setPrecisionLossAllowed(true); // It seems that the default is true
gpu_options.setInferencePreference(GpuDelegate.Options.INFERENCE_PREFERENCE_SUSTAINED_SPEED);
d.gpuDelegate = new GpuDelegate(gpu_options);
options.addDelegate(d.gpuDelegate);
}*/
d.tfliteModel = Utils.loadModelFile(assetManager, modelFilename);
d.tfLite = new Interpreter(d.tfliteModel, options);
} catch (Exception e) {
......@@ -131,11 +103,9 @@ public class YoloV5Classifier implements Classifier {
d.imgData.order(ByteOrder.nativeOrder());
d.intValues = new int[d.INPUT_SIZE * d.INPUT_SIZE];
d.output_box = (int) ((Math.pow((inputSize / 32), 2) + Math.pow((inputSize / 16), 2) + Math.pow((inputSize / 8), 2)) * 3);
// d.OUTPUT_WIDTH = output_width;
// d.MASKS = masks;
// d.ANCHORS = anchors;
if (d.isModelQuantized){
d.output_box = (int) ((Math.pow((inputSize / 32), 2) + Math.pow((inputSize / 16), 2)
+ Math.pow((inputSize / 8), 2)) * 3);
if (d.isModelQuantized) {
Tensor inpten = d.tfLite.getInputTensor(0);
d.inp_scale = inpten.quantizationParams().getScale();
d.inp_zero_point = inpten.quantizationParams().getZeroPoint();
......@@ -156,6 +126,7 @@ public class YoloV5Classifier implements Classifier {
public int getInputSize() {
return INPUT_SIZE;
}
@Override
public void enableStatLogging(final boolean logStats) {
}
......@@ -170,10 +141,8 @@ public class YoloV5Classifier implements Classifier {
tfLite.close();
tfLite = null;
/*
if (gpuDelegate != null) {
gpuDelegate.close();
gpuDelegate = null;
}*/
* if (gpuDelegate != null) { gpuDelegate.close(); gpuDelegate = null; }
*/
if (nnapiDelegate != null) {
nnapiDelegate.close();
nnapiDelegate = null;
......@@ -182,12 +151,8 @@ public class YoloV5Classifier implements Classifier {
}
public void setNumThreads(int num_threads) {
if (tfLite != null)
tfLite.setNumThreads(num_threads);
}

@Override
public void setUseNNAPI(boolean isChecked) {
// if (tfLite != null) tfLite.setUseNNAPI(isChecked);
}
private void recreateInterpreter() {
......@@ -199,11 +164,9 @@ public class YoloV5Classifier implements Classifier {
public void useGpu() {
/*
if (gpuDelegate == null) {
gpuDelegate = new GpuDelegate();
tfliteOptions.addDelegate(gpuDelegate);
recreateInterpreter();
}*/
* if (gpuDelegate == null) { gpuDelegate = new GpuDelegate();
* tfliteOptions.addDelegate(gpuDelegate); recreateInterpreter(); }
*/
}
public void useCPU() {
......@@ -228,22 +191,13 @@ public class YoloV5Classifier implements Classifier {
private final float IMAGE_STD = 255.0f;
//config yolo
// config yolo
private int INPUT_SIZE = -1;
// private int[] OUTPUT_WIDTH;
// private int[][] MASKS;
// private int[] ANCHORS;
private int output_box;
private static final float[] XYSCALE = new float[]{1.2f, 1.1f, 1.05f};
private static final int NUM_BOXES_PER_BLOCK = 3;
// Number of threads in the java app
private static final int NUM_THREADS = 1;
private static boolean isNNAPI = false;
private static boolean isGPU = false;
private boolean isModelQuantized;
......@@ -273,19 +227,17 @@ public class YoloV5Classifier implements Classifier {
private float oup_scale;
private int oup_zero_point;
private int numClass;
private YoloV5Classifier() {
}
//non maximum suppression
// non maximum suppression
protected ArrayList<Recognition> nms(ArrayList<Recognition> list) {
ArrayList<Recognition> nmsList = new ArrayList<Recognition>();
for (int k = 0; k < labels.size(); k++) {
//1.find max confidence per class
PriorityQueue<Recognition> pq =
new PriorityQueue<Recognition>(
50,
new Comparator<Recognition>() {
// 1.find max confidence per class
PriorityQueue<Recognition> pq = new PriorityQueue<Recognition>(50, new Comparator<Recognition>() {
@Override
public int compare(final Recognition lhs, final Recognition rhs) {
// Intentionally reversed to put high confidence at the head of the queue.
......@@ -299,9 +251,9 @@ public class YoloV5Classifier implements Classifier {
}
}
//2.do non maximum suppression
// 2.do non maximum suppression
while (pq.size() > 0) {
//insert detection with max confidence
// insert detection with max confidence
Recognition[] a = new Recognition[pq.size()];
Recognition[] detections = pq.toArray(a);
Recognition max = detections[0];
......@@ -327,11 +279,10 @@ public class YoloV5Classifier implements Classifier {
}
protected float box_intersection(RectF a, RectF b) {
float w = overlap((a.left + a.right) / 2, a.right - a.left,
(b.left + b.right) / 2, b.right - b.left);
float h = overlap((a.top + a.bottom) / 2, a.bottom - a.top,
(b.top + b.bottom) / 2, b.bottom - b.top);
if (w < 0 || h < 0) return 0;
float w = overlap((a.left + a.right) / 2, a.right - a.left, (b.left + b.right) / 2, b.right - b.left);
float h = overlap((a.top + a.bottom) / 2, a.bottom - a.top, (b.top + b.bottom) / 2, b.bottom - b.top);
if (w < 0 || h < 0)
return 0;
float area = w * h;
return area;
}
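// Example (added for illustration, not part of the original class): IoU built
// from box_intersection the same way a box_union/box_iou pair presumably does
// in the elided lines of this file.
protected float iouDemo(RectF a, RectF b) {
float inter = box_intersection(a, b);
float union = (a.right - a.left) * (a.bottom - a.top)
+ (b.right - b.left) * (b.bottom - b.top) - inter;
return union <= 0 ? 0 : inter / union;
}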
......@@ -359,11 +310,7 @@ public class YoloV5Classifier implements Classifier {
* Writes image data into a {@code ByteBuffer}.
*/
protected ByteBuffer convertBitmapToByteBuffer(Bitmap bitmap) {
// ByteBuffer byteBuffer = ByteBuffer.allocateDirect(4 * BATCH_SIZE * INPUT_SIZE * INPUT_SIZE * PIXEL_SIZE);
// byteBuffer.order(ByteOrder.nativeOrder());
// int[] intValues = new int[INPUT_SIZE * INPUT_SIZE];
bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
int pixel = 0;
imgData.rewind();
for (int i = 0; i < INPUT_SIZE; ++i) {
......@@ -371,8 +318,10 @@ public class YoloV5Classifier implements Classifier {
int pixelValue = intValues[i * INPUT_SIZE + j];
if (isModelQuantized) {
// Quantized model
imgData.put((byte) ((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale + inp_zero_point));
imgData.put((byte) ((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale + inp_zero_point));
imgData.put((byte) ((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale
+ inp_zero_point));
imgData.put((byte) ((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale
+ inp_zero_point));
imgData.put((byte) (((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale + inp_zero_point));
} else { // Float model
imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
......@@ -385,16 +334,14 @@ public class YoloV5Classifier implements Classifier {
}
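// Example (added for illustration, not part of the original class): the
// dequantize step used when reading the quantized output tensor below; the
// scale and zero-point values are assumptions for the demo.
private static float dequantDemo(byte raw) {
float oupScaleDemo = 0.02f; // assumed
int oupZeroPointDemo = 4; // assumed
return oupScaleDemo * (((int) raw & 0xFF) - oupZeroPointDemo);
}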
public ArrayList<Recognition> recognizeImage(Bitmap bitmap) {
ByteBuffer byteBuffer_ = convertBitmapToByteBuffer(bitmap);
convertBitmapToByteBuffer(bitmap);
Map<Integer, Object> outputMap = new HashMap<>();
// float[][][] outbuf = new float[1][output_box][labels.size() + 5];
Map<Integer, Object> outputMap = new HashMap<Integer, Object>();
outData.rewind();
outputMap.put(0, outData);
Log.d("YoloV5Classifier", "mObjThresh: " + getObjThresh());
Object[] inputArray = {imgData};
Object[] inputArray = { imgData };
tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
ByteBuffer byteBuffer = (ByteBuffer) outputMap.get(0);
......@@ -406,10 +353,9 @@ public class YoloV5Classifier implements Classifier {
Log.d("YoloV5Classifier", "out[0] detect start");
for (int i = 0; i < output_box; ++i) {
for (int j = 0; j < numClass + 5; ++j) {
if (isModelQuantized){
if (isModelQuantized) {
out[0][i][j] = oup_scale * (((int) byteBuffer.get() & 0xFF) - oup_zero_point);
}
else {
} else {
out[0][i][j] = byteBuffer.getFloat();
}
}
......@@ -418,7 +364,7 @@ public class YoloV5Classifier implements Classifier {
out[0][i][j] *= getInputSize();
}
}
for (int i = 0; i < output_box; ++i){
for (int i = 0; i < output_box; ++i) {
final int offset = 0;
final float confidence = out[0][i][4];
int detectedClass = -1;
......@@ -443,38 +389,33 @@ public class YoloV5Classifier implements Classifier {
final float w = out[0][i][2];
final float h = out[0][i][3];
Log.d("YoloV5Classifier",
Float.toString(xPos) + ',' + yPos + ',' + w + ',' + h);
Log.d("YoloV5Classifier", Float.toString(xPos) + ',' + yPos + ',' + w + ',' + h);
final RectF rect =
new RectF(
Math.max(0, xPos - w / 2),
Math.max(0, yPos - h / 2),
Math.min(bitmap.getWidth() - 1, xPos + w / 2),
Math.min(bitmap.getHeight() - 1, yPos + h / 2));
detections.add(new Recognition("" + offset, labels.get(detectedClass),
confidenceInClass, rect, detectedClass));
final RectF rect = new RectF(Math.max(0, xPos - w / 2), Math.max(0, yPos - h / 2),
Math.min(bitmap.getWidth() - 1, xPos + w / 2), Math.min(bitmap.getHeight() - 1, yPos + h / 2));
detections.add(new Recognition("" + offset, labels.get(detectedClass), confidenceInClass, rect,
detectedClass));
}
}
Log.d(TAG, "detect end");
final ArrayList<Recognition> recognitions = nms(detections);
// final ArrayList<Recognition> recognitions = detections;
return recognitions;
}
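// Example (added for illustration, not part of the original class): score one
// output row the way the loops above do. Each row is laid out as
// [cx, cy, w, h, objectness, class_0 ... class_{numClass-1}].
private static float rowScoreDemo(float[] row, int numClasses) {
float best = 0f;
for (int c = 0; c < numClasses; ++c) {
best = Math.max(best, row[5 + c]); // class scores start at index 5
}
return row[4] * best; // objectness times best class confidence
}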
public boolean checkInvalidateBox(float x, float y, float width, float height, float oriW, float oriH, int inputSize) {
public boolean checkInvalidateBox(float x, float y, float width, float height, float oriW, float oriH,
int inputSize) {
// (1) (x, y, w, h) --> (xmin, ymin, xmax, ymax)
float halfHeight = height / 2.0f;
float halfWidth = width / 2.0f;
float[] pred_coor = new float[]{x - halfWidth, y - halfHeight, x + halfWidth, y + halfHeight};
float[] pred_coor = new float[] { x - halfWidth, y - halfHeight, x + halfWidth, y + halfHeight };
// (2) (xmin, ymin, xmax, ymax) -> (xmin_org, ymin_org, xmax_org, ymax_org)
float resize_ratioW = 1.0f * inputSize / oriW;
float resize_ratioH = 1.0f * inputSize / oriH;
float resize_ratio = resize_ratioW > resize_ratioH ? resize_ratioH : resize_ratioW; //min
float resize_ratio = resize_ratioW > resize_ratioH ? resize_ratioH : resize_ratioW; // min
float dw = (inputSize - resize_ratio * oriW) / 2;
float dh = (inputSize - resize_ratio * oriH) / 2;
......
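The dw/dh values above are the letterbox padding. The elided remainder of checkInvalidateBox presumably maps boxes back into original-image coordinates; a hedged sketch of that inverse mapping (names are illustrative, not the elided code):

// Undo the letterbox resize-plus-pad for one coordinate.
static float unletterbox(float coord, float pad, float resizeRatio) {
return (coord - pad) / resizeRatio;
}
// e.g. xminOrg = unletterbox(pred_coor[0], dw, resize_ratio);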
......@@ -35,27 +35,17 @@ import com.agenew.detection.env.ImageUtils;
import com.agenew.detection.env.Logger;
import com.agenew.detection.tflite.Classifier.Recognition;
/** A tracker that handles non-max suppression and matches existing objects to new detections. */
/**
* A tracker that handles non-max suppression and matches existing objects to
* new detections.
*/
public class MultiBoxTracker {
private static final float TEXT_SIZE_DIP = 18;
private static final float MIN_SIZE = 16.0f;
private static final int[] COLORS = {
Color.BLUE,
Color.RED,
Color.GREEN,
Color.YELLOW,
Color.CYAN,
Color.MAGENTA,
Color.WHITE,
Color.parseColor("#55FF55"),
Color.parseColor("#FFA500"),
Color.parseColor("#FF8888"),
Color.parseColor("#AAAAFF"),
Color.parseColor("#FFFFAA"),
Color.parseColor("#55AAAA"),
Color.parseColor("#AA33AA"),
Color.parseColor("#0D0068")
};
private static final int[] COLORS = { Color.BLUE, Color.RED, Color.GREEN, Color.YELLOW, Color.CYAN, Color.MAGENTA,
Color.WHITE, Color.parseColor("#55FF55"), Color.parseColor("#FFA500"), Color.parseColor("#FF8888"),
Color.parseColor("#AAAAFF"), Color.parseColor("#FFFFAA"), Color.parseColor("#55AAAA"),
Color.parseColor("#AA33AA"), Color.parseColor("#0D0068") };
final List<Pair<Float, RectF>> screenRects = new LinkedList<Pair<Float, RectF>>();
private final Logger logger = new Logger();
private final Queue<Integer> availableColors = new LinkedList<Integer>();
......@@ -80,14 +70,12 @@ public class MultiBoxTracker {
boxPaint.setStrokeJoin(Join.ROUND);
boxPaint.setStrokeMiter(100);
textSizePx =
TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, context.getResources().getDisplayMetrics());
textSizePx = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP,
context.getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx);
}
public synchronized void setFrameConfiguration(
final int width, final int height, final int sensorOrientation) {
public synchronized void setFrameConfiguration(final int width, final int height, final int sensorOrientation) {
frameWidth = width;
frameHeight = height;
this.sensorOrientation = sensorOrientation;
......@@ -122,18 +110,11 @@ public class MultiBoxTracker {
public synchronized void draw(final Canvas canvas) {
final boolean rotated = sensorOrientation % 180 == 90;
final float multiplier =
Math.min(
canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
final float multiplier = Math.min(canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth));
frameToCanvasMatrix =
ImageUtils.getTransformationMatrix(
frameWidth,
frameHeight,
frameToCanvasMatrix = ImageUtils.getTransformationMatrix(frameWidth, frameHeight,
(int) (multiplier * (rotated ? frameHeight : frameWidth)),
(int) (multiplier * (rotated ? frameWidth : frameHeight)),
sensorOrientation,
false);
(int) (multiplier * (rotated ? frameWidth : frameHeight)), sensorOrientation, false);
for (final TrackedRecognition recognition : trackedObjects) {
final RectF trackedPos = new RectF(recognition.location);
......@@ -143,14 +124,10 @@ public class MultiBoxTracker {
float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f;
canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint);
final String labelString =
!TextUtils.isEmpty(recognition.title)
final String labelString = !TextUtils.isEmpty(recognition.title)
? String.format("%s %.2f", recognition.title, (100 * recognition.detectionConfidence))
: String.format("%.2f", (100 * recognition.detectionConfidence));
// borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top,
// labelString);
borderedText.drawText(
canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint);
borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint);
}
}
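// Example (added for illustration, not part of the original class): the
// aspect-fit scale computed above, e.g. a 640x480 frame rotated 90 degrees on
// a 1080x1920 canvas gives min(1920 / 640f, 1080 / 480f) = 2.25.
private static float multiplierDemo(int canvasW, int canvasH, int frameW, int frameH, boolean rotated) {
return Math.min(canvasH / (float) (rotated ? frameW : frameH),
canvasW / (float) (rotated ? frameH : frameW));
}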
......@@ -169,8 +146,7 @@ public class MultiBoxTracker {
final RectF detectionScreenRect = new RectF();
rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect);
logger.v(
"Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);
logger.v("Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);
screenRects.add(new Pair<Float, RectF>(result.getConfidence(), detectionScreenRect));
......@@ -193,13 +169,8 @@ public class MultiBoxTracker {
trackedRecognition.detectionConfidence = potential.first;
trackedRecognition.location = new RectF(potential.second.getLocation());
trackedRecognition.title = potential.second.getTitle();
// trackedRecognition.color = COLORS[trackedObjects.size() % COLORS.length];
trackedRecognition.color = COLORS[potential.second.getDetectedClass() % COLORS.length];
trackedObjects.add(trackedRecognition);
// if (trackedObjects.size() >= COLORS.length) {
// break;
// }
}
}
......
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillType="evenOdd"
android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
android:strokeColor="#00000000"
android:strokeWidth="1">
<aapt:attr name="android:fillColor">
<gradient
android:endX="78.5885"
android:endY="90.9159"
android:startX="48.7653"
android:startY="61.0927"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0"/>
<item
android:color="#00000000"
android:offset="1.0"/>
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
android:strokeColor="#00000000"
android:strokeWidth="1"/>
</vector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillColor="#26A69A"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
</vector>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@android:color/white"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@android:color/white"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<resources>
<!-- Base application theme. -->
<style name="AppTheme.ObjectDetection" parent="Theme.AppCompat.Light.NoActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/tfe_color_primary</item>
<item name="colorPrimaryDark">@color/tfe_color_primary_dark</item>
<item name="colorAccent">@color/tfe_color_accent</item>
</style>
</resources>