Commit 00e7d0f7 by wanglei

init

File: .gitignore (project root)
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
local.properties
File: .gitignore (app module)
/build
\ No newline at end of file
File: build.gradle (app module)
plugins {
alias(libs.plugins.androidApplication)
}
android {
namespace 'com.agenew.detection'
compileSdk 34
defaultConfig {
applicationId "com.agenew.detection"
minSdk 24
targetSdk 34
versionCode 1
versionName "1.0"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
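// TFLite models are memory-mapped at load time, so they must be stored uncompressed in the APK.
// (androidResources is the AGP 7+ replacement for the deprecated aaptOptions block.)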
androidResources {
noCompress "tflite"
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
}
dependencies {
implementation fileTree(include: ['*.jar', '*.aar'], dir: 'libs')
implementation libs.appcompat
implementation libs.material
implementation 'org.tensorflow:tensorflow-lite:2.4.0'
implementation 'org.tensorflow:tensorflow-lite-gpu:2.4.0'
}
\ No newline at end of file
No preview for this file type
File: proguard-rules.pro
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile
\ No newline at end of file
File: AndroidManifest.xml
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools">
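<!-- OpenGL ES 3.1 is declared because the TensorFlow Lite GPU delegate relies on ES 3.1 compute shaders. -->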
<uses-feature android:glEsVersion="0x00030001" android:required="true" />
<uses-sdk />
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE"/>
<uses-permission android:name="android.permission.INTERNET"/>
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/tfe_od_app_name"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/AppTheme.ObjectDetection"
android:hardwareAccelerated="true"
android:installLocation="internalOnly">
<activity
android:name=".MainActivity"
android:label="@string/tfe_od_app_name"
android:screenOrientation="portrait"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
\ No newline at end of file
No preview for this file type
File: (text asset; filename not shown in this view)
badkey
phone
\ No newline at end of file
File: CameraActivity.java
/*
* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.agenew.detection;
import android.Manifest;
import android.app.Fragment;
import android.content.Context;
import android.content.pm.PackageManager;
import android.content.res.AssetManager;
import android.hardware.Camera;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.Image.Plane;
import android.media.ImageReader;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Trace;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
//import androidx.appcompat.widget.Toolbar;
import android.util.Size;
import android.view.Surface;
import android.view.View;
import android.view.ViewTreeObserver;
import android.view.WindowManager;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.CompoundButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.google.android.material.bottomsheet.BottomSheetBehavior;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import com.agenew.detection.env.ImageUtils;
import com.agenew.detection.env.Logger;
public abstract class CameraActivity extends AppCompatActivity
implements OnImageAvailableListener,
Camera.PreviewCallback,
// CompoundButton.OnCheckedChangeListener,
View.OnClickListener {
private static final Logger LOGGER = new Logger();
private static final int PERMISSIONS_REQUEST = 1;
private static final String PERMISSION_CAMERA = Manifest.permission.CAMERA;
private static final String ASSET_PATH = "";
protected int previewWidth = 0;
protected int previewHeight = 0;
private boolean debug = false;
protected Handler handler;
private HandlerThread handlerThread;
private boolean useCamera2API;
private boolean isProcessingFrame = false;
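// One buffer per image plane: the camera2 path fills Y, U and V separately, while the legacy
// path stores the whole NV21 frame in yuvBytes[0].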
private byte[][] yuvBytes = new byte[3][];
private int[] rgbBytes = null;
private int yRowStride;
protected int defaultModelIndex = 0;
protected int defaultDeviceIndex = 2;
private Runnable postInferenceCallback;
private Runnable imageConverter;
protected ArrayList<String> modelStrings = new ArrayList<String>();
private LinearLayout bottomSheetLayout;
private LinearLayout gestureLayout;
private BottomSheetBehavior<LinearLayout> sheetBehavior;
protected TextView frameValueTextView, cropValueTextView, inferenceTimeTextView;
protected ImageView bottomSheetArrowImageView;
private ImageView plusImageView, minusImageView;
protected ListView deviceView;
protected TextView threadsTextView;
protected ListView modelView;
/** Current indices of device and model. */
int currentDevice = -1;
int currentModel = -1;
int currentNumThreads = -1;
ArrayList<String> deviceStrings = new ArrayList<String>();
@Override
protected void onCreate(final Bundle savedInstanceState) {
LOGGER.d("onCreate " + this);
super.onCreate(null);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.tfe_od_activity_camera);
// Toolbar toolbar = findViewById(R.id.toolbar);
// setSupportActionBar(toolbar);
// getSupportActionBar().setDisplayShowTitleEnabled(false);
if (hasPermission()) {
setFragment();
} else {
requestPermission();
}
threadsTextView = findViewById(R.id.threads);
currentNumThreads = Integer.parseInt(threadsTextView.getText().toString().trim());
plusImageView = findViewById(R.id.plus);
minusImageView = findViewById(R.id.minus);
deviceView = findViewById(R.id.device_list);
deviceStrings.add("CPU");
deviceStrings.add("GPU");
deviceStrings.add("NNAPI");
deviceView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
ArrayAdapter<String> deviceAdapter =
new ArrayAdapter<>(
CameraActivity.this, R.layout.deviceview_row, R.id.deviceview_row_text, deviceStrings);
deviceView.setAdapter(deviceAdapter);
deviceView.setItemChecked(defaultDeviceIndex, true);
currentDevice = defaultDeviceIndex;
deviceView.setOnItemClickListener(
new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
updateActiveModel();
}
});
bottomSheetLayout = findViewById(R.id.bottom_sheet_layout);
gestureLayout = findViewById(R.id.gesture_layout);
sheetBehavior = BottomSheetBehavior.from(bottomSheetLayout);
bottomSheetArrowImageView = findViewById(R.id.bottom_sheet_arrow);
modelView = findViewById(R.id.model_list);
modelStrings = getModelStrings(getAssets(), ASSET_PATH);
modelView.setChoiceMode(ListView.CHOICE_MODE_SINGLE);
ArrayAdapter<String> modelAdapter =
new ArrayAdapter<>(
CameraActivity.this, R.layout.listview_row, R.id.listview_row_text, modelStrings);
modelView.setAdapter(modelAdapter);
modelView.setItemChecked(defaultModelIndex, true);
currentModel = defaultModelIndex;
modelView.setOnItemClickListener(
new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
updateActiveModel();
}
});
ViewTreeObserver vto = gestureLayout.getViewTreeObserver();
vto.addOnGlobalLayoutListener(
new ViewTreeObserver.OnGlobalLayoutListener() {
@Override
public void onGlobalLayout() {
// minSdk is 24, so the deprecated pre-API-16 removeGlobalOnLayoutListener path can never run.
gestureLayout.getViewTreeObserver().removeOnGlobalLayoutListener(this);
// int width = bottomSheetLayout.getMeasuredWidth();
int height = gestureLayout.getMeasuredHeight();
sheetBehavior.setPeekHeight(height);
}
});
sheetBehavior.setHideable(false);
sheetBehavior.addBottomSheetCallback(
new BottomSheetBehavior.BottomSheetCallback() {
@Override
public void onStateChanged(@NonNull View bottomSheet, int newState) {
switch (newState) {
case BottomSheetBehavior.STATE_HIDDEN:
break;
case BottomSheetBehavior.STATE_EXPANDED:
{
bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_down);
}
break;
case BottomSheetBehavior.STATE_COLLAPSED:
{
bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
}
break;
case BottomSheetBehavior.STATE_DRAGGING:
break;
case BottomSheetBehavior.STATE_SETTLING:
bottomSheetArrowImageView.setImageResource(R.drawable.icn_chevron_up);
break;
}
}
@Override
public void onSlide(@NonNull View bottomSheet, float slideOffset) {}
});
frameValueTextView = findViewById(R.id.frame_info);
cropValueTextView = findViewById(R.id.crop_info);
inferenceTimeTextView = findViewById(R.id.inference_info);
plusImageView.setOnClickListener(this);
minusImageView.setOnClickListener(this);
}
protected ArrayList<String> getModelStrings(AssetManager mgr, String path){
ArrayList<String> res = new ArrayList<String>();
try {
String[] files = mgr.list(path);
if (files == null) {
return res;
}
for (String file : files) {
String[] splits = file.split("\\.");
if (splits[splits.length - 1].equals("tflite")) {
res.add(file);
}
}
} catch (IOException e) {
LOGGER.e(e, "getModelStrings: failed to list assets");
}
return res;
}
protected int[] getRgbBytes() {
imageConverter.run();
return rgbBytes;
}
protected int getLuminanceStride() {
return yRowStride;
}
protected byte[] getLuminance() {
return yuvBytes[0];
}
/** Callback for android.hardware.Camera API */
@Override
public void onPreviewFrame(final byte[] bytes, final Camera camera) {
if (isProcessingFrame) {
LOGGER.w("Dropping frame!");
return;
}
try {
// Initialize the storage bitmaps once when the resolution is known.
if (rgbBytes == null) {
Camera.Size previewSize = camera.getParameters().getPreviewSize();
previewHeight = previewSize.height;
previewWidth = previewSize.width;
rgbBytes = new int[previewWidth * previewHeight];
onPreviewSizeChosen(new Size(previewSize.width, previewSize.height), 90);
}
} catch (final Exception e) {
LOGGER.e(e, "Exception!");
return;
}
isProcessingFrame = true;
yuvBytes[0] = bytes;
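// Legacy camera previews arrive as NV21, so the luminance row stride is simply the preview width.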
yRowStride = previewWidth;
imageConverter =
new Runnable() {
@Override
public void run() {
ImageUtils.convertYUV420SPToARGB8888(bytes, previewWidth, previewHeight, rgbBytes);
}
};
postInferenceCallback =
new Runnable() {
@Override
public void run() {
camera.addCallbackBuffer(bytes);
isProcessingFrame = false;
}
};
processImage();
}
/** Callback for Camera2 API */
@Override
public void onImageAvailable(final ImageReader reader) {
// We need to wait until we have some size from onPreviewSizeChosen.
if (previewWidth == 0 || previewHeight == 0) {
return;
}
if (rgbBytes == null) {
rgbBytes = new int[previewWidth * previewHeight];
}
try {
final Image image = reader.acquireLatestImage();
if (image == null) {
return;
}
if (isProcessingFrame) {
image.close();
return;
}
isProcessingFrame = true;
Trace.beginSection("imageAvailable");
final Plane[] planes = image.getPlanes();
fillBytes(planes, yuvBytes);
yRowStride = planes[0].getRowStride();
final int uvRowStride = planes[1].getRowStride();
final int uvPixelStride = planes[1].getPixelStride();
imageConverter =
new Runnable() {
@Override
public void run() {
ImageUtils.convertYUV420ToARGB8888(
yuvBytes[0],
yuvBytes[1],
yuvBytes[2],
previewWidth,
previewHeight,
yRowStride,
uvRowStride,
uvPixelStride,
rgbBytes);
}
};
postInferenceCallback =
new Runnable() {
@Override
public void run() {
image.close();
isProcessingFrame = false;
}
};
processImage();
} catch (final Exception e) {
LOGGER.e(e, "Exception!");
Trace.endSection();
return;
}
Trace.endSection();
}
@Override
public synchronized void onStart() {
LOGGER.d("onStart " + this);
super.onStart();
}
@Override
public synchronized void onResume() {
LOGGER.d("onResume " + this);
super.onResume();
handlerThread = new HandlerThread("inference");
handlerThread.start();
handler = new Handler(handlerThread.getLooper());
}
@Override
public synchronized void onPause() {
LOGGER.d("onPause " + this);
handlerThread.quitSafely();
try {
handlerThread.join();
handlerThread = null;
handler = null;
} catch (final InterruptedException e) {
LOGGER.e(e, "Exception!");
}
super.onPause();
}
@Override
public synchronized void onStop() {
LOGGER.d("onStop " + this);
super.onStop();
}
@Override
public synchronized void onDestroy() {
LOGGER.d("onDestroy " + this);
super.onDestroy();
}
protected synchronized void runInBackground(final Runnable r) {
if (handler != null) {
handler.post(r);
}
}
@Override
public void onRequestPermissionsResult(
final int requestCode, final String[] permissions, final int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == PERMISSIONS_REQUEST) {
if (allPermissionsGranted(grantResults)) {
setFragment();
} else {
requestPermission();
}
}
}
private static boolean allPermissionsGranted(final int[] grantResults) {
for (int result : grantResults) {
if (result != PackageManager.PERMISSION_GRANTED) {
return false;
}
}
return true;
}
private boolean hasPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
return checkSelfPermission(PERMISSION_CAMERA) == PackageManager.PERMISSION_GRANTED;
} else {
return true;
}
}
private void requestPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
if (shouldShowRequestPermissionRationale(PERMISSION_CAMERA)) {
Toast.makeText(
CameraActivity.this,
"Camera permission is required for this demo",
Toast.LENGTH_LONG)
.show();
}
requestPermissions(new String[] {PERMISSION_CAMERA}, PERMISSIONS_REQUEST);
}
}
// Returns true if the device supports the required hardware level, or better.
private boolean isHardwareLevelSupported(
CameraCharacteristics characteristics, int requiredLevel) {
Integer deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
if (deviceLevel == null) {
return false;
}
if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
return requiredLevel == deviceLevel;
}
// deviceLevel is not LEGACY, can use numerical sort
return requiredLevel <= deviceLevel;
}
private String chooseCamera() {
final CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
for (final String cameraId : manager.getCameraIdList()) {
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// We don't use a front facing camera in this sample.
final Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
continue;
}
final StreamConfigurationMap map =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
continue;
}
// Fallback to camera1 API for internal cameras that don't have full support.
// This should help with legacy situations where using the camera2 API causes
// distorted or otherwise broken previews.
useCamera2API =
(facing != null && facing == CameraCharacteristics.LENS_FACING_EXTERNAL)
|| isHardwareLevelSupported(
characteristics, CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL);
LOGGER.i("Camera API lv2?: %s", useCamera2API);
return cameraId;
}
} catch (CameraAccessException e) {
LOGGER.e(e, "Not allowed to access camera");
}
return null;
}
protected void setFragment() {
String cameraId = chooseCamera();
Fragment fragment;
if (useCamera2API) {
CameraConnectionFragment camera2Fragment =
CameraConnectionFragment.newInstance(
new CameraConnectionFragment.ConnectionCallback() {
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
previewHeight = size.getHeight();
previewWidth = size.getWidth();
CameraActivity.this.onPreviewSizeChosen(size, rotation);
}
},
this,
getLayoutId(),
getDesiredPreviewFrameSize());
camera2Fragment.setCamera(cameraId);
fragment = camera2Fragment;
} else {
fragment =
new LegacyCameraConnectionFragment(this, getLayoutId(), getDesiredPreviewFrameSize());
}
getFragmentManager().beginTransaction().replace(R.id.container, fragment).commit();
}
protected void fillBytes(final Plane[] planes, final byte[][] yuvBytes) {
// Because of the variable row stride it's not possible to know in
// advance the actual necessary dimensions of the yuv planes.
for (int i = 0; i < planes.length; ++i) {
final ByteBuffer buffer = planes[i].getBuffer();
if (yuvBytes[i] == null) {
LOGGER.d("Initializing buffer %d at size %d", i, buffer.capacity());
yuvBytes[i] = new byte[buffer.capacity()];
}
buffer.get(yuvBytes[i]);
}
}
public boolean isDebug() {
return debug;
}
protected void readyForNextImage() {
if (postInferenceCallback != null) {
postInferenceCallback.run();
}
}
protected int getScreenOrientation() {
switch (getWindowManager().getDefaultDisplay().getRotation()) {
case Surface.ROTATION_270:
return 270;
case Surface.ROTATION_180:
return 180;
case Surface.ROTATION_90:
return 90;
default:
return 0;
}
}
// @Override
// public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
// setUseNNAPI(isChecked);
// if (isChecked) apiSwitchCompat.setText("NNAPI");
// else apiSwitchCompat.setText("TFLITE");
// }
@Override
public void onClick(View v) {
if (v.getId() == R.id.plus) {
String threads = threadsTextView.getText().toString().trim();
int numThreads = Integer.parseInt(threads);
if (numThreads >= 9) return;
numThreads++;
threadsTextView.setText(String.valueOf(numThreads));
setNumThreads(numThreads);
} else if (v.getId() == R.id.minus) {
String threads = threadsTextView.getText().toString().trim();
int numThreads = Integer.parseInt(threads);
if (numThreads == 1) {
return;
}
numThreads--;
threadsTextView.setText(String.valueOf(numThreads));
setNumThreads(numThreads);
}
}
protected void showFrameInfo(String frameInfo) {
frameValueTextView.setText(frameInfo);
}
protected void showCropInfo(String cropInfo) {
cropValueTextView.setText(cropInfo);
}
protected void showInference(String inferenceTime) {
inferenceTimeTextView.setText(inferenceTime);
}
protected abstract void updateActiveModel();
protected abstract void processImage();
protected abstract void onPreviewSizeChosen(final Size size, final int rotation);
protected abstract int getLayoutId();
protected abstract Size getDesiredPreviewFrameSize();
protected abstract void setNumThreads(int numThreads);
protected abstract void setUseNNAPI(boolean isChecked);
}
File: CameraConnectionFragment.java
/*
* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.agenew.detection;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.text.TextUtils;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import com.agenew.detection.customview.AutoFitTextureView;
import com.agenew.detection.env.Logger;
@SuppressLint("ValidFragment")
public class CameraConnectionFragment extends Fragment {
private static final Logger LOGGER = new Logger();
/**
* The camera preview size will be chosen to be the smallest frame by pixel size capable of
* containing a MINIMUM_PREVIEW_SIZE x MINIMUM_PREVIEW_SIZE square.
*/
private static final int MINIMUM_PREVIEW_SIZE = 320;
/** Conversion from screen rotation to JPEG orientation. */
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final String FRAGMENT_DIALOG = "dialog";
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
/** A {@link Semaphore} to prevent the app from exiting before closing the camera. */
private final Semaphore cameraOpenCloseLock = new Semaphore(1);
/** A {@link OnImageAvailableListener} to receive frames as they are available. */
private final OnImageAvailableListener imageListener;
/** The input size in pixels desired by TensorFlow (width and height of a square bitmap). */
private final Size inputSize;
/** The layout identifier to inflate for this Fragment. */
private final int layout;
private final ConnectionCallback cameraConnectionCallback;
private final CameraCaptureSession.CaptureCallback captureCallback =
new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureProgressed(
final CameraCaptureSession session,
final CaptureRequest request,
final CaptureResult partialResult) {}
@Override
public void onCaptureCompleted(
final CameraCaptureSession session,
final CaptureRequest request,
final TotalCaptureResult result) {}
};
/** ID of the current {@link CameraDevice}. */
private String cameraId;
/** An {@link AutoFitTextureView} for camera preview. */
private AutoFitTextureView textureView;
/** A {@link CameraCaptureSession } for camera preview. */
private CameraCaptureSession captureSession;
/** A reference to the opened {@link CameraDevice}. */
private CameraDevice cameraDevice;
/** The rotation in degrees of the camera sensor from the display. */
private Integer sensorOrientation;
/** The {@link Size} of camera preview. */
private Size previewSize;
/** An additional thread for running tasks that shouldn't block the UI. */
private HandlerThread backgroundThread;
/** A {@link Handler} for running tasks in the background. */
private Handler backgroundHandler;
/** An {@link ImageReader} that handles preview frame capture. */
private ImageReader previewReader;
/** {@link CaptureRequest.Builder} for the camera preview */
private CaptureRequest.Builder previewRequestBuilder;
/** {@link CaptureRequest} generated by {@link #previewRequestBuilder} */
private CaptureRequest previewRequest;
/** {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its state. */
private final CameraDevice.StateCallback stateCallback =
new CameraDevice.StateCallback() {
@Override
public void onOpened(final CameraDevice cd) {
// This method is called when the camera is opened. We start camera preview here.
cameraOpenCloseLock.release();
cameraDevice = cd;
createCameraPreviewSession();
}
@Override
public void onDisconnected(final CameraDevice cd) {
cameraOpenCloseLock.release();
cd.close();
cameraDevice = null;
}
@Override
public void onError(final CameraDevice cd, final int error) {
cameraOpenCloseLock.release();
cd.close();
cameraDevice = null;
final Activity activity = getActivity();
if (null != activity) {
activity.finish();
}
}
};
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
* TextureView}.
*/
private final TextureView.SurfaceTextureListener surfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(
final SurfaceTexture texture, final int width, final int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(
final SurfaceTexture texture, final int width, final int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
};
private CameraConnectionFragment(
final ConnectionCallback connectionCallback,
final OnImageAvailableListener imageListener,
final int layout,
final Size inputSize) {
this.cameraConnectionCallback = connectionCallback;
this.imageListener = imageListener;
this.layout = layout;
this.inputSize = inputSize;
}
/**
* Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
* width and height are both at least as large as the required minimum, or an exact match if possible.
*
* @param choices The list of sizes that the camera supports for the intended output class
* @param width The minimum desired width
* @param height The minimum desired height
* @return The optimal {@code Size}, or an arbitrary one if none were big enough
*/
protected static Size chooseOptimalSize(final Size[] choices, final int width, final int height) {
final int minSize = Math.max(Math.min(width, height), MINIMUM_PREVIEW_SIZE);
final Size desiredSize = new Size(width, height);
// Collect the supported resolutions that are at least as big as the preview Surface
boolean exactSizeFound = false;
final List<Size> bigEnough = new ArrayList<Size>();
final List<Size> tooSmall = new ArrayList<Size>();
for (final Size option : choices) {
if (option.equals(desiredSize)) {
// Set the size but don't return yet so that remaining sizes will still be logged.
exactSizeFound = true;
}
if (option.getHeight() >= minSize && option.getWidth() >= minSize) {
bigEnough.add(option);
} else {
tooSmall.add(option);
}
}
LOGGER.i("Desired size: " + desiredSize + ", min size: " + minSize + "x" + minSize);
LOGGER.i("Valid preview sizes: [" + TextUtils.join(", ", bigEnough) + "]");
LOGGER.i("Rejected preview sizes: [" + TextUtils.join(", ", tooSmall) + "]");
if (exactSizeFound) {
LOGGER.i("Exact size match found.");
return desiredSize;
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
final Size chosenSize = Collections.min(bigEnough, new CompareSizesByArea());
LOGGER.i("Chosen size: " + chosenSize.getWidth() + "x" + chosenSize.getHeight());
return chosenSize;
} else {
LOGGER.e("Couldn't find any suitable preview size");
return choices[0];
}
}
public static CameraConnectionFragment newInstance(
final ConnectionCallback callback,
final OnImageAvailableListener imageListener,
final int layout,
final Size inputSize) {
return new CameraConnectionFragment(callback, imageListener, layout, inputSize);
}
/**
* Shows a {@link Toast} on the UI thread.
*
* @param text The message to show
*/
private void showToast(final String text) {
final Activity activity = getActivity();
if (activity != null) {
activity.runOnUiThread(
new Runnable() {
@Override
public void run() {
Toast.makeText(activity, text, Toast.LENGTH_SHORT).show();
}
});
}
}
@Override
public View onCreateView(
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false);
}
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
}
@Override
public void onActivityCreated(final Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (textureView.isAvailable()) {
openCamera(textureView.getWidth(), textureView.getHeight());
} else {
textureView.setSurfaceTextureListener(surfaceTextureListener);
}
}
@Override
public void onPause() {
closeCamera();
stopBackgroundThread();
super.onPause();
}
public void setCamera(String cameraId) {
this.cameraId = cameraId;
}
/** Sets up member variables related to camera. */
private void setUpCameraOutputs() {
final Activity activity = getActivity();
final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
final CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
final StreamConfigurationMap map =
characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data.
previewSize =
chooseOptimalSize(
map.getOutputSizes(SurfaceTexture.class),
inputSize.getWidth(),
inputSize.getHeight());
// We fit the aspect ratio of TextureView to the size of preview we picked.
final int orientation = getResources().getConfiguration().orientation;
if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
} else {
textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
}
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
} catch (final NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
ErrorDialog.newInstance(getString(R.string.tfe_od_camera_error))
.show(getChildFragmentManager(), FRAGMENT_DIALOG);
throw new IllegalStateException(getString(R.string.tfe_od_camera_error));
}
cameraConnectionCallback.onPreviewSizeChosen(previewSize, sensorOrientation);
}
/** Opens the camera specified by {@link CameraConnectionFragment#cameraId}. */
private void openCamera(final int width, final int height) {
setUpCameraOutputs();
configureTransform(width, height);
final Activity activity = getActivity();
final CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
try {
if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(cameraId, stateCallback, backgroundHandler);
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
} catch (final InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera opening.", e);
}
}
/** Closes the current {@link CameraDevice}. */
private void closeCamera() {
try {
cameraOpenCloseLock.acquire();
if (null != captureSession) {
captureSession.close();
captureSession = null;
}
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != previewReader) {
previewReader.close();
previewReader = null;
}
} catch (final InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
cameraOpenCloseLock.release();
}
}
/** Starts a background thread and its {@link Handler}. */
private void startBackgroundThread() {
backgroundThread = new HandlerThread("ImageListener");
backgroundThread.start();
backgroundHandler = new Handler(backgroundThread.getLooper());
}
/** Stops the background thread and its {@link Handler}. */
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
backgroundHandler = null;
} catch (final InterruptedException e) {
LOGGER.e(e, "Exception!");
}
}
/** Creates a new {@link CameraCaptureSession} for camera preview. */
private void createCameraPreviewSession() {
try {
final SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
// This is the output Surface we need to start preview.
final Surface surface = new Surface(texture);
// We set up a CaptureRequest.Builder with the output Surface.
previewRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
previewRequestBuilder.addTarget(surface);
LOGGER.i("Opening camera preview: " + previewSize.getWidth() + "x" + previewSize.getHeight());
// Create the reader for the preview frames.
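// maxImages of 2 lets one frame be processed while the next is buffered; the listener
// (CameraActivity) closes each Image in its postInferenceCallback.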
previewReader =
ImageReader.newInstance(
previewSize.getWidth(), previewSize.getHeight(), ImageFormat.YUV_420_888, 2);
previewReader.setOnImageAvailableListener(imageListener, backgroundHandler);
previewRequestBuilder.addTarget(previewReader.getSurface());
// Here, we create a CameraCaptureSession for camera preview.
cameraDevice.createCaptureSession(
Arrays.asList(surface, previewReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(final CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (null == cameraDevice) {
return;
}
// When the session is ready, we start displaying the preview.
captureSession = cameraCaptureSession;
try {
// Auto focus should be continuous for camera preview.
previewRequestBuilder.set(
CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// Flash is automatically enabled when necessary.
previewRequestBuilder.set(
CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
// Finally, we start displaying the camera preview.
previewRequest = previewRequestBuilder.build();
captureSession.setRepeatingRequest(
previewRequest, captureCallback, backgroundHandler);
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
}
}
@Override
public void onConfigureFailed(final CameraCaptureSession cameraCaptureSession) {
showToast("Failed");
}
},
null);
} catch (final CameraAccessException e) {
LOGGER.e(e, "Exception!");
}
}
/**
* Configures the necessary {@link Matrix} transformation to {@code textureView}. This method
* should be called after the camera preview size is determined in setUpCameraOutputs and also
* the size of {@code textureView} is fixed.
*
* @param viewWidth The width of {@code textureView}
* @param viewHeight The height of {@code textureView}
*/
private void configureTransform(final int viewWidth, final int viewHeight) {
final Activity activity = getActivity();
if (null == textureView || null == previewSize || null == activity) {
return;
}
final int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
final Matrix matrix = new Matrix();
final RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
final RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
final float centerX = viewRect.centerX();
final float centerY = viewRect.centerY();
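// For 90/270-degree rotations the sensor buffer is transposed relative to the view, so map the
// view rect onto the buffer rect, scale to fill, then counter-rotate: 90 * (rotation - 2) gives
// -90 for ROTATION_90 and +90 for ROTATION_270.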
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
final float scale =
Math.max(
(float) viewHeight / previewSize.getHeight(),
(float) viewWidth / previewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
textureView.setTransform(matrix);
}
/**
* Callback for Activities to use to initialize their data once the selected preview size is
* known.
*/
public interface ConnectionCallback {
void onPreviewSizeChosen(Size size, int cameraRotation);
}
/** Compares two {@code Size}s based on their areas. */
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(final Size lhs, final Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum(
(long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight());
}
}
/** Shows an error message dialog. */
public static class ErrorDialog extends DialogFragment {
private static final String ARG_MESSAGE = "message";
public static ErrorDialog newInstance(final String message) {
final ErrorDialog dialog = new ErrorDialog();
final Bundle args = new Bundle();
args.putString(ARG_MESSAGE, message);
dialog.setArguments(args);
return dialog;
}
@Override
public Dialog onCreateDialog(final Bundle savedInstanceState) {
final Activity activity = getActivity();
return new AlertDialog.Builder(activity)
.setMessage(getArguments().getString(ARG_MESSAGE))
.setPositiveButton(
android.R.string.ok,
new DialogInterface.OnClickListener() {
@Override
public void onClick(final DialogInterface dialogInterface, final int i) {
activity.finish();
}
})
.create();
}
}
}
File: LegacyCameraConnectionFragment.java
/*
* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.agenew.detection;
import android.app.Fragment;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import java.io.IOException;
import java.util.List;
import com.agenew.detection.customview.AutoFitTextureView;
import com.agenew.detection.env.ImageUtils;
import com.agenew.detection.env.Logger;
public class LegacyCameraConnectionFragment extends Fragment {
private static final Logger LOGGER = new Logger();
/** Conversion from screen rotation to JPEG orientation. */
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private Camera camera;
private Camera.PreviewCallback imageListener;
private Size desiredSize;
/** The layout identifier to inflate for this Fragment. */
private int layout;
/** An {@link AutoFitTextureView} for camera preview. */
private AutoFitTextureView textureView;
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
* TextureView}.
*/
private final TextureView.SurfaceTextureListener surfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(
final SurfaceTexture texture, final int width, final int height) {
int index = getCameraId();
if (index < 0) {
LOGGER.e("No back-facing camera found");
return;
}
camera = Camera.open(index);
try {
Camera.Parameters parameters = camera.getParameters();
List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes != null
&& focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
Size[] sizes = new Size[cameraSizes.size()];
int i = 0;
for (Camera.Size size : cameraSizes) {
sizes[i++] = new Size(size.width, size.height);
}
Size previewSize =
CameraConnectionFragment.chooseOptimalSize(
sizes, desiredSize.getWidth(), desiredSize.getHeight());
parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
camera.setDisplayOrientation(90);
camera.setParameters(parameters);
camera.setPreviewTexture(texture);
} catch (IOException exception) {
camera.release();
camera = null;
// The preview texture could not be attached; bail out instead of using a released camera.
return;
}
camera.setPreviewCallbackWithBuffer(imageListener);
Camera.Size s = camera.getParameters().getPreviewSize();
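// Queue a single reusable callback buffer sized for one NV21 preview frame; CameraActivity hands
// it back via addCallbackBuffer() in its postInferenceCallback.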
camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
textureView.setAspectRatio(s.height, s.width);
camera.startPreview();
}
@Override
public void onSurfaceTextureSizeChanged(
final SurfaceTexture texture, final int width, final int height) {}
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {}
};
/** An additional thread for running tasks that shouldn't block the UI. */
private HandlerThread backgroundThread;
public LegacyCameraConnectionFragment(
final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
this.imageListener = imageListener;
this.layout = layout;
this.desiredSize = desiredSize;
}
@Override
public View onCreateView(
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false);
}
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
}
@Override
public void onActivityCreated(final Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (textureView.isAvailable()) {
camera.startPreview();
} else {
textureView.setSurfaceTextureListener(surfaceTextureListener);
}
}
@Override
public void onPause() {
stopCamera();
stopBackgroundThread();
super.onPause();
}
/** Starts a background thread and its {@link Handler}. */
private void startBackgroundThread() {
backgroundThread = new HandlerThread("CameraBackground");
backgroundThread.start();
}
/** Stops the background thread and its {@link Handler}. */
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
} catch (final InterruptedException e) {
LOGGER.e(e, "Exception!");
}
}
protected void stopCamera() {
if (camera != null) {
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
}
}
private int getCameraId() {
CameraInfo ci = new CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, ci);
if (ci.facing == CameraInfo.CAMERA_FACING_BACK) return i;
}
return -1; // No camera found
}
}
File: MainActivity.java
/*
* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.agenew.detection;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.media.ImageReader.OnImageAvailableListener;
import android.os.SystemClock;
import android.util.Size;
import android.util.TypedValue;
import android.widget.Toast;
import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import com.agenew.detection.customview.OverlayView;
import com.agenew.detection.customview.OverlayView.DrawCallback;
import com.agenew.detection.env.BorderedText;
import com.agenew.detection.env.ImageUtils;
import com.agenew.detection.env.Logger;
import com.agenew.detection.tflite.Classifier;
import com.agenew.detection.tflite.DetectorFactory;
import com.agenew.detection.tflite.YoloV5Classifier;
import com.agenew.detection.tracking.MultiBoxTracker;
/**
* An activity that uses a YoloV5Classifier and a MultiBoxTracker to detect and then track
* objects.
*/
public class MainActivity extends CameraActivity implements OnImageAvailableListener {
private static final Logger LOGGER = new Logger();
private static final DetectorMode MODE = DetectorMode.TF_OD_API;
public static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.3f;
private static final boolean MAINTAIN_ASPECT = true;
private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 640);
private static final boolean SAVE_PREVIEW_BITMAP = false;
private static final float TEXT_SIZE_DIP = 10;
OverlayView trackingOverlay;
private Integer sensorOrientation;
private YoloV5Classifier detector;
private long lastProcessingTimeMs;
private Bitmap rgbFrameBitmap = null;
private Bitmap croppedBitmap = null;
private Bitmap cropCopyBitmap = null;
private boolean computingDetection = false;
private long timestamp = 0;
private Matrix frameToCropTransform;
private Matrix cropToFrameTransform;
private MultiBoxTracker tracker;
private BorderedText borderedText;
@Override
public void onPreviewSizeChosen(final Size size, final int rotation) {
final float textSizePx =
TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx);
borderedText.setTypeface(Typeface.MONOSPACE);
tracker = new MultiBoxTracker(this);
final int modelIndex = modelView.getCheckedItemPosition();
final String modelString = modelStrings.get(modelIndex);
final int deviceIndex = deviceView.getCheckedItemPosition();
String device = deviceStrings.get(deviceIndex);
try {
detector = DetectorFactory.getDetector(getAssets(), modelString);
} catch (final IOException e) {
e.printStackTrace();
LOGGER.e(e, "Exception initializing classifier!");
Toast toast =
Toast.makeText(
getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
toast.show();
finish();
return; // detector is still null here; falling through would NPE on detector.useCPU() below.
}
if (device.equals("CPU")) {
detector.useCPU();
} else if (device.equals("GPU")) {
detector.useGpu();
} else if (device.equals("NNAPI")) {
detector.useNNAPI();
}
int cropSize = detector.getInputSize();
previewWidth = size.getWidth();
previewHeight = size.getHeight();
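// Rotation of the camera sensor relative to the current screen orientation; this drives the
// frame-to-crop transform below.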
sensorOrientation = rotation - getScreenOrientation();
LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation);
LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Config.ARGB_8888);
croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
frameToCropTransform =
ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
cropSize, cropSize,
sensorOrientation, MAINTAIN_ASPECT);
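// Invert the transform so detection boxes can be mapped from model-input (crop) coordinates
// back into preview-frame coordinates.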
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
trackingOverlay = (OverlayView) findViewById(R.id.tracking_overlay);
trackingOverlay.addCallback(
new DrawCallback() {
@Override
public void drawCallback(final Canvas canvas) {
tracker.draw(canvas);
if (isDebug()) {
tracker.drawDebug(canvas);
}
}
});
tracker.setFrameConfiguration(previewWidth, previewHeight, sensorOrientation);
}
protected void updateActiveModel() {
// Get UI information before delegating to background
final int modelIndex = modelView.getCheckedItemPosition();
final int deviceIndex = deviceView.getCheckedItemPosition();
String threads = threadsTextView.getText().toString().trim();
final int numThreads = Integer.parseInt(threads);
handler.post(() -> {
if (modelIndex == currentModel && deviceIndex == currentDevice
&& numThreads == currentNumThreads) {
return;
}
currentModel = modelIndex;
currentDevice = deviceIndex;
currentNumThreads = numThreads;
// Disable classifier while updating
if (detector != null) {
detector.close();
detector = null;
}
// Lookup names of parameters.
String modelString = modelStrings.get(modelIndex);
String device = deviceStrings.get(deviceIndex);
LOGGER.i("Changing model to " + modelString + " device " + device);
// Try to load model.
try {
detector = DetectorFactory.getDetector(getAssets(), modelString);
// Customize the interpreter to the type of device we want to use.
if (detector == null) {
return;
}
}
catch (IOException e) {
e.printStackTrace();
LOGGER.e(e, "Exception in updateActiveModel()");
Toast toast =
Toast.makeText(
getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
toast.show();
finish();
return; // detector is null after a failed load; the device/thread configuration below would NPE.
}
if (device.equals("CPU")) {
detector.useCPU();
} else if (device.equals("GPU")) {
detector.useGpu();
} else if (device.equals("NNAPI")) {
detector.useNNAPI();
}
detector.setNumThreads(numThreads);
int cropSize = detector.getInputSize();
croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Config.ARGB_8888);
frameToCropTransform =
ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
cropSize, cropSize,
sensorOrientation, MAINTAIN_ASPECT);
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
});
}
@Override
protected void processImage() {
++timestamp;
final long currTimestamp = timestamp;
trackingOverlay.postInvalidate();
// No mutex needed as this method is not reentrant.
if (computingDetection) {
readyForNextImage();
return;
}
computingDetection = true;
LOGGER.i("Preparing image " + currTimestamp + " for detection in bg thread.");
rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
readyForNextImage();
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
// For examining the actual TF input.
if (SAVE_PREVIEW_BITMAP) {
ImageUtils.saveBitmap(croppedBitmap);
}
runInBackground(
new Runnable() {
@Override
public void run() {
LOGGER.i("Running detection on image " + currTimestamp);
final long startTime = SystemClock.uptimeMillis();
final List<Classifier.Recognition> results = detector.recognizeImage(croppedBitmap);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
Log.e("CHECK", "run: " + results.size());
cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
final Canvas canvas = new Canvas(cropCopyBitmap);
final Paint paint = new Paint();
paint.setColor(Color.RED);
paint.setStyle(Style.STROKE);
paint.setStrokeWidth(2.0f);
float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
switch (MODE) {
case TF_OD_API:
minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
break;
}
final List<Classifier.Recognition> mappedRecognitions =
new LinkedList<Classifier.Recognition>();
for (final Classifier.Recognition result : results) {
final RectF location = result.getLocation();
if (location != null && result.getConfidence() >= minimumConfidence) {
canvas.drawRect(location, paint);
cropToFrameTransform.mapRect(location);
result.setLocation(location);
mappedRecognitions.add(result);
}
}
tracker.trackResults(mappedRecognitions, currTimestamp);
trackingOverlay.postInvalidate();
computingDetection = false;
runOnUiThread(
new Runnable() {
@Override
public void run() {
showFrameInfo(previewWidth + "x" + previewHeight);
showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight());
showInference(lastProcessingTimeMs + "ms");
}
});
}
});
}
@Override
protected int getLayoutId() {
return R.layout.tfe_od_camera_connection_fragment_tracking;
}
@Override
protected Size getDesiredPreviewFrameSize() {
return DESIRED_PREVIEW_SIZE;
}
// Which detection mode to use. Only TF_OD_API-style post-processing is implemented here.
private enum DetectorMode {
TF_OD_API;
}
@Override
protected void setUseNNAPI(final boolean isChecked) {
runInBackground(() -> detector.setUseNNAPI(isChecked));
}
@Override
protected void setNumThreads(final int numThreads) {
runInBackground(() -> detector.setNumThreads(numThreads));
}
}
File: AutoFitTextureView.java
/*
* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.agenew.detection.customview;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;
/** A {@link TextureView} that can be adjusted to a specified aspect ratio. */
public class AutoFitTextureView extends TextureView {
private int ratioWidth = 0;
private int ratioHeight = 0;
public AutoFitTextureView(final Context context) {
this(context, null);
}
public AutoFitTextureView(final Context context, final AttributeSet attrs) {
this(context, attrs, 0);
}
public AutoFitTextureView(final Context context, final AttributeSet attrs, final int defStyle) {
super(context, attrs, defStyle);
}
/**
* Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
* calculated from the parameters. Note that the actual sizes of parameters don't matter, that is,
* calling setAspectRatio(2, 3) and setAspectRatio(4, 6) yield the same result.
*
* @param width Relative horizontal size
* @param height Relative vertical size
*/
public void setAspectRatio(final int width, final int height) {
if (width < 0 || height < 0) {
throw new IllegalArgumentException("Size cannot be negative.");
}
ratioWidth = width;
ratioHeight = height;
requestLayout();
}
@Override
protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
final int width = MeasureSpec.getSize(widthMeasureSpec);
final int height = MeasureSpec.getSize(heightMeasureSpec);
if (0 == ratioWidth || 0 == ratioHeight) {
setMeasuredDimension(width, height);
} else {
if (width < height * ratioWidth / ratioHeight) {
setMeasuredDimension(width, width * ratioHeight / ratioWidth);
} else {
setMeasuredDimension(height * ratioWidth / ratioHeight, height);
}
}
}
}
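/*
* A minimal, JVM-runnable sketch of the aspect-fit arithmetic in onMeasure above. The
* 1080x1920 measured size and the 16:9 ratio are illustrative assumptions, not values
* taken from this app.
*/
class AspectFitSketch {
public static void main(String[] args) {
int width = 1080, height = 1920; // measured view size (assumed)
int ratioWidth = 16, ratioHeight = 9; // as if setAspectRatio(16, 9) had been called
if (width < height * ratioWidth / ratioHeight) {
// width is the limiting dimension: shrink height to preserve the ratio
System.out.println(width + "x" + (width * ratioHeight / ratioWidth)); // 1080x607
} else {
System.out.println((height * ratioWidth / ratioHeight) + "x" + height);
}
}
}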
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package com.agenew.detection.customview;
import android.content.Context;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.View;
import java.util.LinkedList;
import java.util.List;
/** A simple View providing a render callback to other classes. */
public class OverlayView extends View {
private final List<DrawCallback> callbacks = new LinkedList<DrawCallback>();
public OverlayView(final Context context, final AttributeSet attrs) {
super(context, attrs);
}
public void addCallback(final DrawCallback callback) {
callbacks.add(callback);
}
@Override
public synchronized void draw(final Canvas canvas) {
for (final DrawCallback callback : callbacks) {
callback.drawCallback(canvas);
}
}
/** Interface defining the callback for client classes. */
public interface DrawCallback {
public void drawCallback(final Canvas canvas);
}
}
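/*
* A short usage sketch, assuming an activity that owns both views: OverlayView invokes
* every registered callback from draw(), so wiring the tracker in this way repaints the
* boxes on each postInvalidate(). The sketch class and method names are illustrative.
*/
import com.agenew.detection.customview.OverlayView;
import com.agenew.detection.tracking.MultiBoxTracker;
class OverlayWiringSketch {
static void wire(OverlayView overlay, MultiBoxTracker tracker) {
overlay.addCallback(canvas -> tracker.draw(canvas)); // called from OverlayView.draw()
overlay.postInvalidate(); // schedules a redraw, which runs every registered callback
}
}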
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package com.agenew.detection.env;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.graphics.Typeface;
import java.util.Vector;
/** A class that encapsulates the tedious bits of rendering legible, bordered text onto a canvas. */
public class BorderedText {
private final Paint interiorPaint;
private final Paint exteriorPaint;
private final float textSize;
/**
* Creates a left-aligned bordered text object with a white interior, and a black exterior with
* the specified text size.
*
* @param textSize text size in pixels
*/
public BorderedText(final float textSize) {
this(Color.WHITE, Color.BLACK, textSize);
}
/**
* Creates a bordered text object with the specified interior and exterior colors, text size and
* alignment.
*
* @param interiorColor the interior text color
* @param exteriorColor the exterior text color
* @param textSize text size in pixels
*/
public BorderedText(final int interiorColor, final int exteriorColor, final float textSize) {
interiorPaint = new Paint();
interiorPaint.setTextSize(textSize);
interiorPaint.setColor(interiorColor);
interiorPaint.setStyle(Style.FILL);
interiorPaint.setAntiAlias(false);
interiorPaint.setAlpha(255);
exteriorPaint = new Paint();
exteriorPaint.setTextSize(textSize);
exteriorPaint.setColor(exteriorColor);
exteriorPaint.setStyle(Style.FILL_AND_STROKE);
exteriorPaint.setStrokeWidth(textSize / 8);
exteriorPaint.setAntiAlias(false);
exteriorPaint.setAlpha(255);
this.textSize = textSize;
}
public void setTypeface(Typeface typeface) {
interiorPaint.setTypeface(typeface);
exteriorPaint.setTypeface(typeface);
}
public void drawText(final Canvas canvas, final float posX, final float posY, final String text) {
canvas.drawText(text, posX, posY, exteriorPaint);
canvas.drawText(text, posX, posY, interiorPaint);
}
public void drawText(
final Canvas canvas, final float posX, final float posY, final String text, Paint bgPaint) {
float width = exteriorPaint.measureText(text);
float textSize = exteriorPaint.getTextSize();
Paint paint = new Paint(bgPaint);
paint.setStyle(Paint.Style.FILL);
paint.setAlpha(160);
canvas.drawRect(posX, posY, posX + width, posY + textSize, paint); // left, top, right, bottom
canvas.drawText(text, posX, (posY + textSize), interiorPaint);
}
public void drawLines(Canvas canvas, final float posX, final float posY, Vector<String> lines) {
int lineNum = 0;
for (final String line : lines) {
drawText(canvas, posX, posY - getTextSize() * (lines.size() - lineNum - 1), line);
++lineNum;
}
}
public void setInteriorColor(final int color) {
interiorPaint.setColor(color);
}
public void setExteriorColor(final int color) {
exteriorPaint.setColor(color);
}
public float getTextSize() {
return textSize;
}
public void setAlpha(final int alpha) {
interiorPaint.setAlpha(alpha);
exteriorPaint.setAlpha(alpha);
}
public void getTextBounds(
final String line, final int index, final int count, final Rect lineBounds) {
interiorPaint.getTextBounds(line, index, count, lineBounds);
}
public void setTextAlign(final Align align) {
interiorPaint.setTextAlign(align);
exteriorPaint.setTextAlign(align);
}
}
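/*
* A construction sketch, assuming the same sizing convention MultiBoxTracker uses further
* below: convert 18 dip to pixels, then draw white-on-black bordered text. The canvas
* coordinates and label text are illustrative.
*/
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Typeface;
import android.util.TypedValue;
import com.agenew.detection.env.BorderedText;
class BorderedTextSketch {
static void label(Context context, Canvas canvas) {
float textSizePx = TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, 18, context.getResources().getDisplayMetrics());
BorderedText borderedText = new BorderedText(textSizePx); // white interior, black border
borderedText.setTypeface(Typeface.MONOSPACE); // optional; an assumed styling choice
borderedText.drawText(canvas, 10, textSizePx, "phone 0.87"); // label text assumed
}
}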
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package com.agenew.detection.env;
import android.graphics.Bitmap;
import android.graphics.Matrix;
import android.os.Environment;
import java.io.File;
import java.io.FileOutputStream;
/** Utility class for manipulating images. */
public class ImageUtils {
// This value is 2 ^ 18 - 1, and is used to clamp the RGB values before their ranges
// are normalized to eight bits.
static final int kMaxChannelValue = 262143;
@SuppressWarnings("unused")
private static final Logger LOGGER = new Logger();
/**
* Utility method to compute the allocated size in bytes of a YUV420SP image of the given
* dimensions.
*/
public static int getYUVByteSize(final int width, final int height) {
// The luminance plane requires 1 byte per pixel.
final int ySize = width * height;
// The UV plane works on 2x2 blocks, so dimensions with odd size must be rounded up.
// Each 2x2 block takes 2 bytes to encode, one each for U and V.
final int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2;
return ySize + uvSize;
}
/**
* Saves a Bitmap object to disk for analysis.
*
* @param bitmap The bitmap to save.
*/
public static void saveBitmap(final Bitmap bitmap) {
saveBitmap(bitmap, "preview.png");
}
/**
* Saves a Bitmap object to disk for analysis.
*
* @param bitmap The bitmap to save.
* @param filename The location to save the bitmap to.
*/
public static void saveBitmap(final Bitmap bitmap, final String filename) {
final String root =
Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "tensorflow";
LOGGER.i("Saving %dx%d bitmap to %s.", bitmap.getWidth(), bitmap.getHeight(), root);
final File myDir = new File(root);
if (!myDir.mkdirs()) {
LOGGER.i("mkdirs returned false; the directory may already exist");
}
final String fname = filename;
final File file = new File(myDir, fname);
if (file.exists()) {
file.delete();
}
try (FileOutputStream out = new FileOutputStream(file)) {
bitmap.compress(Bitmap.CompressFormat.PNG, 99, out); // PNG is lossless; the quality hint is ignored
out.flush();
} catch (final Exception e) {
LOGGER.e(e, "Exception!");
}
}
public static void convertYUV420SPToARGB8888(byte[] input, int width, int height, int[] output) {
final int frameSize = width * height;
for (int j = 0, yp = 0; j < height; j++) {
int uvp = frameSize + (j >> 1) * width;
int u = 0;
int v = 0;
for (int i = 0; i < width; i++, yp++) {
int y = 0xff & input[yp];
if ((i & 1) == 0) {
v = 0xff & input[uvp++];
u = 0xff & input[uvp++];
}
output[yp] = YUV2RGB(y, u, v);
}
}
}
private static int YUV2RGB(int y, int u, int v) {
// Adjust and check YUV values
y = (y - 16) < 0 ? 0 : (y - 16);
u -= 128;
v -= 128;
// This is the floating point equivalent. We do the conversion in integer
// because some Android devices do not have floating point in hardware.
// nR = (int)(1.164 * nY + 2.018 * nU);
// nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
// nB = (int)(1.164 * nY + 1.596 * nV);
int y1192 = 1192 * y;
int r = (y1192 + 1634 * v);
int g = (y1192 - 833 * v - 400 * u);
int b = (y1192 + 2066 * u);
// Clipping RGB values to be inside boundaries [ 0 , kMaxChannelValue ]
r = r > kMaxChannelValue ? kMaxChannelValue : (r < 0 ? 0 : r);
g = g > kMaxChannelValue ? kMaxChannelValue : (g < 0 ? 0 : g);
b = b > kMaxChannelValue ? kMaxChannelValue : (b < 0 ? 0 : b);
return 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
}
public static void convertYUV420ToARGB8888(
byte[] yData,
byte[] uData,
byte[] vData,
int width,
int height,
int yRowStride,
int uvRowStride,
int uvPixelStride,
int[] out) {
int yp = 0;
for (int j = 0; j < height; j++) {
int pY = yRowStride * j;
int pUV = uvRowStride * (j >> 1);
for (int i = 0; i < width; i++) {
int uv_offset = pUV + (i >> 1) * uvPixelStride;
out[yp++] = YUV2RGB(0xff & yData[pY + i], 0xff & uData[uv_offset], 0xff & vData[uv_offset]);
}
}
}
/**
* Returns a transformation matrix from one reference frame into another. Handles cropping (if
* maintaining aspect ratio is desired) and rotation.
*
* @param srcWidth Width of source frame.
* @param srcHeight Height of source frame.
* @param dstWidth Width of destination frame.
* @param dstHeight Height of destination frame.
* @param applyRotation Amount of rotation to apply from one frame to another. Must be a multiple
* of 90.
* @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
* cropping the image if necessary.
* @return The transformation fulfilling the desired requirements.
*/
public static Matrix getTransformationMatrix(
final int srcWidth,
final int srcHeight,
final int dstWidth,
final int dstHeight,
final int applyRotation,
final boolean maintainAspectRatio) {
final Matrix matrix = new Matrix();
if (applyRotation != 0) {
if (applyRotation % 90 != 0) {
LOGGER.w("Rotation of %d % 90 != 0", applyRotation);
}
// Translate so center of image is at origin.
matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
// Rotate around origin.
matrix.postRotate(applyRotation);
}
// Account for the already applied rotation, if any, and then determine how
// much scaling is needed for each axis.
final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
final int inWidth = transpose ? srcHeight : srcWidth;
final int inHeight = transpose ? srcWidth : srcHeight;
// Apply scaling if necessary.
if (inWidth != dstWidth || inHeight != dstHeight) {
final float scaleFactorX = dstWidth / (float) inWidth;
final float scaleFactorY = dstHeight / (float) inHeight;
if (maintainAspectRatio) {
// Scale by minimum factor so that dst is filled completely while
// maintaining the aspect ratio. Some image may fall off the edge.
final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
matrix.postScale(scaleFactor, scaleFactor);
} else {
// Scale exactly to fill dst from src.
matrix.postScale(scaleFactorX, scaleFactorY);
}
}
if (applyRotation != 0) {
// Translate back from origin centered reference to destination frame.
matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
}
return matrix;
}
}
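/*
* A JVM-runnable check of the YUV420SP size formula in getYUVByteSize above; the 640x480
* frame size is an illustrative assumption.
*/
class YuvSizeSketch {
public static void main(String[] args) {
int width = 640, height = 480; // assumed preview size
int ySize = width * height; // 1 byte per luminance pixel
int uvSize = ((width + 1) / 2) * ((height + 1) / 2) * 2; // 2 bytes per 2x2 chroma block
System.out.println(ySize + uvSize); // 307200 + 153600 = 460800 bytes
}
}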
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package com.agenew.detection.env;
import android.util.Log;
import java.util.HashSet;
import java.util.Set;
/** Wrapper for the platform log function, allows convenient message prefixing and log disabling. */
public final class Logger {
private static final String DEFAULT_TAG = "tensorflow";
private static final int DEFAULT_MIN_LOG_LEVEL = Log.DEBUG;
// Classes to be ignored when examining the stack trace
private static final Set<String> IGNORED_CLASS_NAMES;
static {
IGNORED_CLASS_NAMES = new HashSet<String>(3);
IGNORED_CLASS_NAMES.add("dalvik.system.VMStack");
IGNORED_CLASS_NAMES.add("java.lang.Thread");
IGNORED_CLASS_NAMES.add(Logger.class.getCanonicalName());
}
private final String tag;
private final String messagePrefix;
private int minLogLevel = DEFAULT_MIN_LOG_LEVEL;
/**
* Creates a Logger using the class name as the message prefix.
*
* @param clazz the simple name of this class is used as the message prefix.
*/
public Logger(final Class<?> clazz) {
this(clazz.getSimpleName());
}
/**
* Creates a Logger using the specified message prefix.
*
* @param messagePrefix is prepended to the text of every message.
*/
public Logger(final String messagePrefix) {
this(DEFAULT_TAG, messagePrefix);
}
/**
* Creates a Logger with a custom tag and a custom message prefix. If the message prefix is
* {@code null}, the caller's class name is used as the prefix.
*
* @param tag identifies the source of a log message.
* @param messagePrefix prepended to every message if non-null; if null, the caller's class
* name is used instead.
*/
public Logger(final String tag, final String messagePrefix) {
this.tag = tag;
final String prefix = messagePrefix == null ? getCallerSimpleName() : messagePrefix;
this.messagePrefix = (prefix.length() > 0) ? prefix + ": " : prefix;
}
/** Creates a Logger using the caller's class name as the message prefix. */
public Logger() {
this(DEFAULT_TAG, null);
}
/** Creates a Logger using the caller's class name as the message prefix. */
public Logger(final int minLogLevel) {
this(DEFAULT_TAG, null);
this.minLogLevel = minLogLevel;
}
/**
* Return caller's simple name.
*
* <p>Android getStackTrace() returns an array that looks like this: stackTrace[0]:
* dalvik.system.VMStack stackTrace[1]: java.lang.Thread stackTrace[2]:
* com.google.android.apps.unveil.env.UnveilLogger stackTrace[3]:
* com.google.android.apps.unveil.BaseApplication
*
* <p>This function returns the simple version of the first non-filtered name.
*
* @return caller's simple name
*/
private static String getCallerSimpleName() {
// Get the current callstack so we can pull the class of the caller off of it.
final StackTraceElement[] stackTrace = Thread.currentThread().getStackTrace();
for (final StackTraceElement elem : stackTrace) {
final String className = elem.getClassName();
if (!IGNORED_CLASS_NAMES.contains(className)) {
// We're only interested in the simple name of the class, not the complete package.
final String[] classParts = className.split("\\.");
return classParts[classParts.length - 1];
}
}
return Logger.class.getSimpleName();
}
public void setMinLogLevel(final int minLogLevel) {
this.minLogLevel = minLogLevel;
}
public boolean isLoggable(final int logLevel) {
return logLevel >= minLogLevel || Log.isLoggable(tag, logLevel);
}
private String toMessage(final String format, final Object... args) {
return messagePrefix + (args.length > 0 ? String.format(format, args) : format);
}
public void v(final String format, final Object... args) {
if (isLoggable(Log.VERBOSE)) {
Log.v(tag, toMessage(format, args));
}
}
public void v(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.VERBOSE)) {
Log.v(tag, toMessage(format, args), t);
}
}
public void d(final String format, final Object... args) {
if (isLoggable(Log.DEBUG)) {
Log.d(tag, toMessage(format, args));
}
}
public void d(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.DEBUG)) {
Log.d(tag, toMessage(format, args), t);
}
}
public void i(final String format, final Object... args) {
if (isLoggable(Log.INFO)) {
Log.i(tag, toMessage(format, args));
}
}
public void i(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.INFO)) {
Log.i(tag, toMessage(format, args), t);
}
}
public void w(final String format, final Object... args) {
if (isLoggable(Log.WARN)) {
Log.w(tag, toMessage(format, args));
}
}
public void w(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.WARN)) {
Log.w(tag, toMessage(format, args), t);
}
}
public void e(final String format, final Object... args) {
if (isLoggable(Log.ERROR)) {
Log.e(tag, toMessage(format, args));
}
}
public void e(final Throwable t, final String format, final Object... args) {
if (isLoggable(Log.ERROR)) {
Log.e(tag, toMessage(format, args), t);
}
}
}
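/*
* A usage sketch matching how the classes above use Logger: with the no-argument
* constructor the tag defaults to "tensorflow" and the prefix to the caller's simple
* class name. The messages below are illustrative.
*/
import com.agenew.detection.env.Logger;
class LoggerSketch {
private static final Logger LOGGER = new Logger(); // prefix = caller's class name
static void demo() {
LOGGER.i("Saving %dx%d bitmap.", 640, 480); // formatted via String.format
LOGGER.w("plain message with no args"); // passed through unformatted
}
}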
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package com.agenew.detection.env;
import android.graphics.Bitmap;
import android.text.TextUtils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/** Size class independent of a Camera object. */
public class Size implements Comparable<Size>, Serializable {
// 1.4 went out with this UID so we'll need to maintain it to preserve pending queries when
// upgrading.
public static final long serialVersionUID = 7689808733290872361L;
public final int width;
public final int height;
public Size(final int width, final int height) {
this.width = width;
this.height = height;
}
public Size(final Bitmap bmp) {
this.width = bmp.getWidth();
this.height = bmp.getHeight();
}
/**
* Rotate a size by the given number of degrees.
*
* @param size Size to rotate.
* @param rotation Degrees {0, 90, 180, 270} to rotate the size.
* @return Rotated size.
*/
public static Size getRotatedSize(final Size size, final int rotation) {
if (rotation % 180 != 0) {
// The phone is portrait, therefore the camera is sideways and frame should be rotated.
return new Size(size.height, size.width);
}
return size;
}
public static Size parseFromString(String sizeString) {
if (TextUtils.isEmpty(sizeString)) {
return null;
}
sizeString = sizeString.trim();
// The expected format is "<width>x<height>".
final String[] components = sizeString.split("x");
if (components.length == 2) {
try {
final int width = Integer.parseInt(components[0]);
final int height = Integer.parseInt(components[1]);
return new Size(width, height);
} catch (final NumberFormatException e) {
return null;
}
} else {
return null;
}
}
public static List<Size> sizeStringToList(final String sizes) {
final List<Size> sizeList = new ArrayList<Size>();
if (sizes != null) {
final String[] pairs = sizes.split(",");
for (final String pair : pairs) {
final Size size = Size.parseFromString(pair);
if (size != null) {
sizeList.add(size);
}
}
}
return sizeList;
}
public static String sizeListToString(final List<Size> sizes) {
String sizesString = "";
if (sizes != null && sizes.size() > 0) {
sizesString = sizes.get(0).toString();
for (int i = 1; i < sizes.size(); i++) {
sizesString += "," + sizes.get(i).toString();
}
}
return sizesString;
}
public static final String dimensionsAsString(final int width, final int height) {
return width + "x" + height;
}
public final float aspectRatio() {
return (float) width / (float) height;
}
@Override
public int compareTo(final Size other) {
return width * height - other.width * other.height;
}
@Override
public boolean equals(final Object other) {
if (other == null) {
return false;
}
if (!(other instanceof Size)) {
return false;
}
final Size otherSize = (Size) other;
return (width == otherSize.width && height == otherSize.height);
}
@Override
public int hashCode() {
return width * 32713 + height;
}
@Override
public String toString() {
return dimensionsAsString(width, height);
}
}
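/*
* A small sketch of Size parsing and ordering: parseFromString expects "<width>x<height>",
* and compareTo orders by pixel area, so an ascending sort puts the smallest resolution
* first. The size strings are assumed examples.
*/
import java.util.Collections;
import java.util.List;
import com.agenew.detection.env.Size;
class SizeSketch {
static void demo() {
List<Size> sizes = Size.sizeStringToList("640x480,1920x1080,320x240");
Collections.sort(sizes); // area-based compareTo: 320x240, 640x480, 1920x1080
System.out.println(Size.sizeListToString(sizes)); // "320x240,640x480,1920x1080"
}
}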
package com.agenew.detection.env;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.os.Environment;
import android.util.Log;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
public class Utils {
/**
* Memory-map the model file in Assets.
*/
public static MappedByteBuffer loadModelFile(AssetManager assets, String modelFilename)
throws IOException {
AssetFileDescriptor fileDescriptor = assets.openFd(modelFilename);
FileInputStream inputStream = new FileInputStream(fileDescriptor.getFileDescriptor());
FileChannel fileChannel = inputStream.getChannel();
long startOffset = fileDescriptor.getStartOffset();
long declaredLength = fileDescriptor.getDeclaredLength();
return fileChannel.map(FileChannel.MapMode.READ_ONLY, startOffset, declaredLength);
}
public static void softmax(final float[] vals) {
float max = Float.NEGATIVE_INFINITY;
for (final float val : vals) {
max = Math.max(max, val);
}
float sum = 0.0f;
for (int i = 0; i < vals.length; ++i) {
vals[i] = (float) Math.exp(vals[i] - max);
sum += vals[i];
}
for (int i = 0; i < vals.length; ++i) {
vals[i] = vals[i] / sum;
}
}
public static float expit(final float x) {
return (float) (1. / (1. + Math.exp(-x)));
}
// public static Bitmap scale(Context context, String filePath) {
// AssetManager assetManager = context.getAssets();
//
// InputStream istr;
// Bitmap bitmap = null;
// try {
// istr = assetManager.open(filePath);
// bitmap = BitmapFactory.decodeStream(istr);
// bitmap = Bitmap.createScaledBitmap(bitmap, MainActivity.TF_OD_API_INPUT_SIZE, MainActivity.TF_OD_API_INPUT_SIZE, false);
// } catch (IOException e) {
// // handle exception
// Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
// }
//
// return bitmap;
// }
public static Bitmap getBitmapFromAsset(Context context, String filePath) {
AssetManager assetManager = context.getAssets();
InputStream istr;
Bitmap bitmap = null;
try {
istr = assetManager.open(filePath);
bitmap = BitmapFactory.decodeStream(istr);
// return bitmap.copy(Bitmap.Config.ARGB_8888,true);
} catch (IOException e) {
// handle exception
Log.e("getBitmapFromAsset", "getBitmapFromAsset: " + e.getMessage());
}
return bitmap;
}
/**
* Returns a transformation matrix from one reference frame into another.
* Handles cropping (if maintaining aspect ratio is desired) and rotation.
*
* @param srcWidth Width of source frame.
* @param srcHeight Height of source frame.
* @param dstWidth Width of destination frame.
* @param dstHeight Height of destination frame.
* @param applyRotation Amount of rotation to apply from one frame to another.
* Must be a multiple of 90.
* @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
* cropping the image if necessary.
* @return The transformation fulfilling the desired requirements.
*/
public static Matrix getTransformationMatrix(
final int srcWidth,
final int srcHeight,
final int dstWidth,
final int dstHeight,
final int applyRotation,
final boolean maintainAspectRatio) {
final Matrix matrix = new Matrix();
if (applyRotation != 0) {
// Translate so center of image is at origin.
matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
// Rotate around origin.
matrix.postRotate(applyRotation);
}
// Account for the already applied rotation, if any, and then determine how
// much scaling is needed for each axis.
final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
final int inWidth = transpose ? srcHeight : srcWidth;
final int inHeight = transpose ? srcWidth : srcHeight;
// Apply scaling if necessary.
if (inWidth != dstWidth || inHeight != dstHeight) {
final float scaleFactorX = dstWidth / (float) inWidth;
final float scaleFactorY = dstHeight / (float) inHeight;
if (maintainAspectRatio) {
// Scale by minimum factor so that dst is filled completely while
// maintaining the aspect ratio. Some image may fall off the edge.
final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
matrix.postScale(scaleFactor, scaleFactor);
} else {
// Scale exactly to fill dst from src.
matrix.postScale(scaleFactorX, scaleFactorY);
}
}
if (applyRotation != 0) {
// Translate back from origin centered reference to destination frame.
matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
}
return matrix;
}
public static Bitmap processBitmap(Bitmap source, int size){
int image_height = source.getHeight();
int image_width = source.getWidth();
Bitmap croppedBitmap = Bitmap.createBitmap(size, size, Bitmap.Config.ARGB_8888);
Matrix frameToCropTransformations = getTransformationMatrix(image_width,image_height,size,size,0,false);
Matrix cropToFrameTransformations = new Matrix();
frameToCropTransformations.invert(cropToFrameTransformations);
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(source, frameToCropTransformations, null);
return croppedBitmap;
}
public static void writeToFile(String data, Context context) {
try {
String baseDir = Environment.getExternalStorageDirectory().getAbsolutePath();
String fileName = "myFile.txt";
File file = new File(baseDir + File.separator + fileName);
FileOutputStream stream = new FileOutputStream(file);
try {
stream.write(data.getBytes());
} finally {
stream.close();
}
} catch (IOException e) {
Log.e("Exception", "File write failed: " + e.toString());
}
}
}
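/*
* A JVM-runnable sketch of the numeric helpers above. The softmax body is copied inline so
* the sketch runs without the Android classes Utils imports; Utils.expit is the logistic
* sigmoid. Input values are assumed examples.
*/
class MathSketch {
public static void main(String[] args) {
float[] scores = {1f, 2f, 3f};
// same steps as Utils.softmax: subtract the max for numerical stability, then normalize
float max = Float.NEGATIVE_INFINITY;
for (float v : scores) max = Math.max(max, v);
float sum = 0f;
for (int i = 0; i < scores.length; ++i) {
scores[i] = (float) Math.exp(scores[i] - max);
sum += scores[i];
}
for (int i = 0; i < scores.length; ++i) scores[i] /= sum;
System.out.printf("%.3f %.3f %.3f%n", scores[0], scores[1], scores[2]); // 0.090 0.245 0.665
System.out.println(1. / (1. + Math.exp(-0.0))); // expit(0) = 0.5
}
}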
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package com.agenew.detection.tflite;
import android.graphics.Bitmap;
import android.graphics.RectF;
import java.util.List;
/**
* Generic interface for interacting with different recognition engines.
*/
public interface Classifier {
List<Recognition> recognizeImage(Bitmap bitmap);
void enableStatLogging(final boolean debug);
String getStatString();
void close();
void setNumThreads(int num_threads);
void setUseNNAPI(boolean isChecked);
float getObjThresh();
/**
* An immutable result returned by a Classifier describing what was recognized.
*/
public class Recognition {
/**
* A unique identifier for what has been recognized. Specific to the class, not the instance of
* the object.
*/
private final String id;
/**
* Display name for the recognition.
*/
private final String title;
/**
* A sortable score for how good the recognition is relative to others. Higher should be better.
*/
private final Float confidence;
/**
* Optional location within the source image for the location of the recognized object.
*/
private RectF location;
private int detectedClass;
public Recognition(
final String id, final String title, final Float confidence, final RectF location) {
this.id = id;
this.title = title;
this.confidence = confidence;
this.location = location;
}
public Recognition(final String id, final String title, final Float confidence, final RectF location, int detectedClass) {
this.id = id;
this.title = title;
this.confidence = confidence;
this.location = location;
this.detectedClass = detectedClass;
}
public String getId() {
return id;
}
public String getTitle() {
return title;
}
public Float getConfidence() {
return confidence;
}
public RectF getLocation() {
return new RectF(location);
}
public void setLocation(RectF location) {
this.location = location;
}
public int getDetectedClass() {
return detectedClass;
}
public void setDetectedClass(int detectedClass) {
this.detectedClass = detectedClass;
}
@Override
public String toString() {
String resultString = "";
if (id != null) {
resultString += "[" + id + "] ";
}
if (title != null) {
resultString += title + " ";
}
if (confidence != null) {
resultString += String.format("(%.1f%%) ", confidence * 100.0f);
}
if (location != null) {
resultString += location + " ";
}
return resultString.trim();
}
}
}
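/*
* A small sketch of the Recognition value class above. toString() tolerates a null
* location; the id, label and confidence are assumed examples.
*/
import com.agenew.detection.tflite.Classifier;
class RecognitionSketch {
static void demo() {
Classifier.Recognition r = new Classifier.Recognition("0", "phone", 0.87f, null);
System.out.println(r); // "[0] phone (87.0%)"
}
}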
package com.agenew.detection.tflite;
import android.content.res.AssetManager;
import java.io.IOException;
public class DetectorFactory {
public static YoloV5Classifier getDetector(
final AssetManager assetManager,
final String modelFilename)
throws IOException {
String labelFilename = null;
boolean isQuantized = false;
int inputSize = 0;
int[] output_width = new int[]{0};
int[][] masks = new int[][]{{0}};
int[] anchors = new int[]{0};
if (modelFilename.endsWith(".tflite")) {
labelFilename = "file:///android_asset/class.txt";
isQuantized = modelFilename.endsWith("-int8.tflite");
inputSize = 640;
output_width = new int[]{80, 40, 20};
masks = new int[][]{{0, 1, 2}, {3, 4, 5}, {6, 7, 8}};
anchors = new int[]{
10,13, 16,30, 33,23, 30,61, 62,45, 59,119, 116,90, 156,198, 373,326
};
}
return YoloV5Classifier.create(assetManager, modelFilename, labelFilename, isQuantized,
inputSize);
}
}
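/*
* A usage sketch, assuming a bundled model asset: getDetector above only fills in the
* label file, input size and quantization for filenames ending in ".tflite" (a
* "-int8.tflite" suffix selects the quantized path; the masks/anchors it computes are
* currently unused by create()). The filename here is an assumed example that must match
* an asset actually shipped with the app.
*/
import android.content.res.AssetManager;
import java.io.IOException;
import com.agenew.detection.tflite.DetectorFactory;
import com.agenew.detection.tflite.YoloV5Classifier;
class DetectorSketch {
static YoloV5Classifier load(AssetManager assets) throws IOException {
return DetectorFactory.getDetector(assets, "yolov5s-int8.tflite"); // filename assumed
}
}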
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package com.agenew.detection.tflite;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.graphics.RectF;
import android.os.Build;
import android.util.Log;
//import org.tensorflow.lite.Interpreter;
import com.mediatek.neuropilot_S.Interpreter;
//import org.tensorflow.lite.Tensor;
import com.mediatek.neuropilot_S.Tensor;
import com.agenew.detection.MainActivity;
import com.agenew.detection.env.Logger;
import com.agenew.detection.env.Utils;
//import org.tensorflow.lite.gpu.GpuDelegate;
//import org.tensorflow.lite.nnapi.NnApiDelegate;
import com.mediatek.neuropilot_S.nnapi.NnApiDelegate;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.MappedByteBuffer;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.PriorityQueue;
import java.util.Vector;
/**
* Wrapper for YOLOv5 detection models converted to TensorFlow Lite and driven here through
* the MediaTek NeuroPilot Interpreter. The links below are carried over from the TensorFlow
* Object Detection API sample this class was adapted from:
* - https://github.com/tensorflow/models/tree/master/research/object_detection
* - https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/detection_model_zoo.md
* - https://github.com/tensorflow/models/blob/master/research/object_detection/g3doc/running_on_mobile_tensorflowlite.md#running-our-model-on-android
*/
public class YoloV5Classifier implements Classifier {
private static final String TAG = "YoloV5Classifier";
/**
* Initializes a TensorFlow Lite interpreter for detecting objects in images.
*
* @param assetManager The asset manager to be used to load assets.
* @param modelFilename The asset path of the TF Lite model file.
* @param labelFilename The asset path of the label file listing the classes.
* @param isQuantized Whether the model is quantized (uint8 input/output) or floating point.
* @param inputSize The width/height in pixels of the square model input.
*/
public static YoloV5Classifier create(
final AssetManager assetManager,
final String modelFilename,
final String labelFilename,
final boolean isQuantized,
final int inputSize
/*final int[] output_width,
final int[][] masks,
final int[] anchors*/)
throws IOException {
final YoloV5Classifier d = new YoloV5Classifier();
String actualFilename = labelFilename.split("file:///android_asset/")[1];
InputStream labelsInput = assetManager.open(actualFilename);
BufferedReader br = new BufferedReader(new InputStreamReader(labelsInput));
String line;
while ((line = br.readLine()) != null) {
LOGGER.w(line);
d.labels.add(line);
}
br.close();
try {
Interpreter.Options options = new Interpreter.Options();
options.setNumThreads(NUM_THREADS);
if (isNNAPI) {
d.nnapiDelegate = null;
// Initialize interpreter with NNAPI delegate for Android Pie or above
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.P) {
d.nnapiDelegate = new NnApiDelegate();
options.addDelegate(d.nnapiDelegate);
options.setNumThreads(NUM_THREADS);
// options.setUseNNAPI(false);
// options.setAllowFp16PrecisionForFp32(true);
// options.setAllowBufferHandleOutput(true);
options.setUseNNAPI(true);
}
}
/*
if (isGPU) {
GpuDelegate.Options gpu_options = new GpuDelegate.Options();
gpu_options.setPrecisionLossAllowed(true); // It seems that the default is true
gpu_options.setInferencePreference(GpuDelegate.Options.INFERENCE_PREFERENCE_SUSTAINED_SPEED);
d.gpuDelegate = new GpuDelegate(gpu_options);
options.addDelegate(d.gpuDelegate);
}*/
d.tfliteModel = Utils.loadModelFile(assetManager, modelFilename);
d.tfLite = new Interpreter(d.tfliteModel, options);
} catch (Exception e) {
throw new RuntimeException(e);
}
d.isModelQuantized = isQuantized;
// Pre-allocate buffers.
int numBytesPerChannel;
if (isQuantized) {
numBytesPerChannel = 1; // Quantized
} else {
numBytesPerChannel = 4; // Floating point
}
d.INPUT_SIZE = inputSize;
d.imgData = ByteBuffer.allocateDirect(1 * d.INPUT_SIZE * d.INPUT_SIZE * 3 * numBytesPerChannel);
d.imgData.order(ByteOrder.nativeOrder());
d.intValues = new int[d.INPUT_SIZE * d.INPUT_SIZE];
d.output_box = (int) ((Math.pow((inputSize / 32), 2) + Math.pow((inputSize / 16), 2) + Math.pow((inputSize / 8), 2)) * 3);
// d.OUTPUT_WIDTH = output_width;
// d.MASKS = masks;
// d.ANCHORS = anchors;
if (d.isModelQuantized){
Tensor inpten = d.tfLite.getInputTensor(0);
d.inp_scale = inpten.quantizationParams().getScale();
d.inp_zero_point = inpten.quantizationParams().getZeroPoint();
Tensor oupten = d.tfLite.getOutputTensor(0);
d.output_box = oupten.shape()[1];
d.oup_scale = oupten.quantizationParams().getScale();
d.oup_zero_point = oupten.quantizationParams().getZeroPoint();
}
int[] shape = d.tfLite.getOutputTensor(0).shape();
int numClass = shape[shape.length - 1] - 5;
d.numClass = numClass;
d.outData = ByteBuffer.allocateDirect(d.output_box * (numClass + 5) * numBytesPerChannel);
d.outData.order(ByteOrder.nativeOrder());
return d;
}
public int getInputSize() {
return INPUT_SIZE;
}
@Override
public void enableStatLogging(final boolean logStats) {
}
@Override
public String getStatString() {
return "";
}
@Override
public void close() {
tfLite.close();
tfLite = null;
/*
if (gpuDelegate != null) {
gpuDelegate.close();
gpuDelegate = null;
}*/
if (nnapiDelegate != null) {
nnapiDelegate.close();
nnapiDelegate = null;
}
tfliteModel = null;
}
@Override
public void setNumThreads(int num_threads) {
if (tfLite != null) tfLite.setNumThreads(num_threads);
}
@Override
public void setUseNNAPI(boolean isChecked) {
// if (tfLite != null) tfLite.setUseNNAPI(isChecked);
}
private void recreateInterpreter() {
if (tfLite != null) {
tfLite.close();
tfLite = new Interpreter(tfliteModel, tfliteOptions);
}
}
public void useGpu() {
/*
if (gpuDelegate == null) {
gpuDelegate = new GpuDelegate();
tfliteOptions.addDelegate(gpuDelegate);
recreateInterpreter();
}*/
}
public void useCPU() {
recreateInterpreter();
}
public void useNNAPI() {
nnapiDelegate = new NnApiDelegate();
tfliteOptions.addDelegate(nnapiDelegate);
recreateInterpreter();
}
@Override
public float getObjThresh() {
return MainActivity.MINIMUM_CONFIDENCE_TF_OD_API;
}
private static final Logger LOGGER = new Logger();
// Float model
private final float IMAGE_MEAN = 0;
private final float IMAGE_STD = 255.0f;
// YOLO configuration
private int INPUT_SIZE = -1;
// private int[] OUTPUT_WIDTH;
// private int[][] MASKS;
// private int[] ANCHORS;
private int output_box;
private static final float[] XYSCALE = new float[]{1.2f, 1.1f, 1.05f};
private static final int NUM_BOXES_PER_BLOCK = 3;
// Number of threads in the java app
private static final int NUM_THREADS = 1;
private static boolean isNNAPI = false;
private static boolean isGPU = false;
private boolean isModelQuantized;
/** holds a gpu delegate */
// GpuDelegate gpuDelegate = null;
/** holds an nnapi delegate */
NnApiDelegate nnapiDelegate = null;
/** The loaded TensorFlow Lite model. */
private MappedByteBuffer tfliteModel;
/** Options for configuring the Interpreter. */
private final Interpreter.Options tfliteOptions = new Interpreter.Options();
// Config values.
// Pre-allocated buffers.
private Vector<String> labels = new Vector<String>();
private int[] intValues;
private ByteBuffer imgData;
private ByteBuffer outData;
private Interpreter tfLite;
private float inp_scale;
private int inp_zero_point;
private float oup_scale;
private int oup_zero_point;
private int numClass;
private YoloV5Classifier() {
}
// Non-maximum suppression: per class, keep the highest-confidence box and drop boxes that
// overlap it with IoU >= mNmsThresh.
protected ArrayList<Recognition> nms(ArrayList<Recognition> list) {
ArrayList<Recognition> nmsList = new ArrayList<Recognition>();
for (int k = 0; k < labels.size(); k++) {
// 1. collect this class's detections in a priority queue, highest confidence first
PriorityQueue<Recognition> pq =
new PriorityQueue<Recognition>(
50,
new Comparator<Recognition>() {
@Override
public int compare(final Recognition lhs, final Recognition rhs) {
// Intentionally reversed to put high confidence at the head of the queue.
return Float.compare(rhs.getConfidence(), lhs.getConfidence());
}
});
for (int i = 0; i < list.size(); ++i) {
if (list.get(i).getDetectedClass() == k) {
pq.add(list.get(i));
}
}
// 2. do non-maximum suppression
while (pq.size() > 0) {
// keep the highest-confidence detection, then drop candidates that overlap it
Recognition[] a = new Recognition[pq.size()];
Recognition[] detections = pq.toArray(a);
Recognition max = detections[0];
nmsList.add(max);
pq.clear();
for (int j = 1; j < detections.length; j++) {
Recognition detection = detections[j];
RectF b = detection.getLocation();
if (box_iou(max.getLocation(), b) < mNmsThresh) {
pq.add(detection);
}
}
}
}
return nmsList;
}
protected float mNmsThresh = 0.6f;
protected float box_iou(RectF a, RectF b) {
return box_intersection(a, b) / box_union(a, b);
}
protected float box_intersection(RectF a, RectF b) {
float w = overlap((a.left + a.right) / 2, a.right - a.left,
(b.left + b.right) / 2, b.right - b.left);
float h = overlap((a.top + a.bottom) / 2, a.bottom - a.top,
(b.top + b.bottom) / 2, b.bottom - b.top);
if (w < 0 || h < 0) return 0;
float area = w * h;
return area;
}
protected float box_union(RectF a, RectF b) {
float i = box_intersection(a, b);
float u = (a.right - a.left) * (a.bottom - a.top) + (b.right - b.left) * (b.bottom - b.top) - i;
return u;
}
protected float overlap(float x1, float w1, float x2, float w2) {
float l1 = x1 - w1 / 2;
float l2 = x2 - w2 / 2;
float left = l1 > l2 ? l1 : l2;
float r1 = x1 + w1 / 2;
float r2 = x2 + w2 / 2;
float right = r1 < r2 ? r1 : r2;
return right - left;
}
protected static final int BATCH_SIZE = 1;
protected static final int PIXEL_SIZE = 3;
/**
* Writes image data into the pre-allocated {@code imgData} buffer.
*/
protected ByteBuffer convertBitmapToByteBuffer(Bitmap bitmap) {
// ByteBuffer byteBuffer = ByteBuffer.allocateDirect(4 * BATCH_SIZE * INPUT_SIZE * INPUT_SIZE * PIXEL_SIZE);
// byteBuffer.order(ByteOrder.nativeOrder());
// int[] intValues = new int[INPUT_SIZE * INPUT_SIZE];
bitmap.getPixels(intValues, 0, bitmap.getWidth(), 0, 0, bitmap.getWidth(), bitmap.getHeight());
int pixel = 0;
imgData.rewind();
for (int i = 0; i < INPUT_SIZE; ++i) {
for (int j = 0; j < INPUT_SIZE; ++j) {
int pixelValue = intValues[i * INPUT_SIZE + j];
if (isModelQuantized) {
// Quantized model
imgData.put((byte) ((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale + inp_zero_point));
imgData.put((byte) ((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale + inp_zero_point));
imgData.put((byte) (((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD / inp_scale + inp_zero_point));
} else { // Float model
imgData.putFloat((((pixelValue >> 16) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
imgData.putFloat((((pixelValue >> 8) & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
imgData.putFloat(((pixelValue & 0xFF) - IMAGE_MEAN) / IMAGE_STD);
}
}
}
return imgData;
}
@Override
public ArrayList<Recognition> recognizeImage(Bitmap bitmap) {
convertBitmapToByteBuffer(bitmap); // fills the shared, pre-allocated imgData buffer in place
Map<Integer, Object> outputMap = new HashMap<>();
// float[][][] outbuf = new float[1][output_box][labels.size() + 5];
outData.rewind();
outputMap.put(0, outData);
Log.d("YoloV5Classifier", "mObjThresh: " + getObjThresh());
Object[] inputArray = {imgData};
tfLite.runForMultipleInputsOutputs(inputArray, outputMap);
ByteBuffer byteBuffer = (ByteBuffer) outputMap.get(0);
byteBuffer.rewind();
ArrayList<Recognition> detections = new ArrayList<Recognition>();
float[][][] out = new float[1][output_box][numClass + 5];
Log.d("YoloV5Classifier", "out[0] detect start");
for (int i = 0; i < output_box; ++i) {
for (int j = 0; j < numClass + 5; ++j) {
if (isModelQuantized){
out[0][i][j] = oup_scale * (((int) byteBuffer.get() & 0xFF) - oup_zero_point);
}
else {
out[0][i][j] = byteBuffer.getFloat();
}
}
// Denormalize xywh
for (int j = 0; j < 4; ++j) {
out[0][i][j] *= getInputSize();
}
}
for (int i = 0; i < output_box; ++i){
final int offset = 0;
final float confidence = out[0][i][4];
int detectedClass = -1;
float maxClass = 0;
final float[] classes = new float[labels.size()];
for (int c = 0; c < labels.size(); ++c) {
classes[c] = out[0][i][5 + c];
}
for (int c = 0; c < labels.size(); ++c) {
if (classes[c] > maxClass) {
detectedClass = c;
maxClass = classes[c];
}
}
final float confidenceInClass = maxClass * confidence;
if (confidenceInClass > getObjThresh()) {
final float xPos = out[0][i][0];
final float yPos = out[0][i][1];
final float w = out[0][i][2];
final float h = out[0][i][3];
Log.d("YoloV5Classifier",
Float.toString(xPos) + ',' + yPos + ',' + w + ',' + h);
final RectF rect =
new RectF(
Math.max(0, xPos - w / 2),
Math.max(0, yPos - h / 2),
Math.min(bitmap.getWidth() - 1, xPos + w / 2),
Math.min(bitmap.getHeight() - 1, yPos + h / 2));
detections.add(new Recognition("" + offset, labels.get(detectedClass),
confidenceInClass, rect, detectedClass));
}
}
Log.d(TAG, "detect end");
final ArrayList<Recognition> recognitions = nms(detections);
// final ArrayList<Recognition> recognitions = detections;
return recognitions;
}
public boolean checkInvalidateBox(float x, float y, float width, float height, float oriW, float oriH, int inputSize) {
// (1) (x, y, w, h) --> (xmin, ymin, xmax, ymax)
float halfHeight = height / 2.0f;
float halfWidth = width / 2.0f;
float[] pred_coor = new float[]{x - halfWidth, y - halfHeight, x + halfWidth, y + halfHeight};
// (2) (xmin, ymin, xmax, ymax) -> (xmin_org, ymin_org, xmax_org, ymax_org)
float resize_ratioW = 1.0f * inputSize / oriW;
float resize_ratioH = 1.0f * inputSize / oriH;
float resize_ratio = resize_ratioW > resize_ratioH ? resize_ratioH : resize_ratioW; // min
float dw = (inputSize - resize_ratio * oriW) / 2;
float dh = (inputSize - resize_ratio * oriH) / 2;
pred_coor[0] = 1.0f * (pred_coor[0] - dw) / resize_ratio;
pred_coor[2] = 1.0f * (pred_coor[2] - dw) / resize_ratio;
pred_coor[1] = 1.0f * (pred_coor[1] - dh) / resize_ratio;
pred_coor[3] = 1.0f * (pred_coor[3] - dh) / resize_ratio;
// (3) clip boxes that fall outside the original image
pred_coor[0] = pred_coor[0] > 0 ? pred_coor[0] : 0;
pred_coor[1] = pred_coor[1] > 0 ? pred_coor[1] : 0;
pred_coor[2] = pred_coor[2] < (oriW - 1) ? pred_coor[2] : (oriW - 1);
pred_coor[3] = pred_coor[3] < (oriH - 1) ? pred_coor[3] : (oriH - 1);
if ((pred_coor[0] > pred_coor[2]) || (pred_coor[1] > pred_coor[3])) {
pred_coor[0] = 0;
pred_coor[1] = 0;
pred_coor[2] = 0;
pred_coor[3] = 0;
}
// (4) discard some invalid boxes
float temp1 = pred_coor[2] - pred_coor[0];
float temp2 = pred_coor[3] - pred_coor[1];
float temp = temp1 * temp2;
if (temp < 0) {
Log.e("checkInvalidateBox", "temp < 0");
return false;
}
if (Math.sqrt(temp) > Float.MAX_VALUE) {
Log.e("checkInvalidateBox", "temp max");
return false;
}
return true;
}
}
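/*
* A JVM-runnable sketch of the IoU math used by nms() above, restated with plain
* (left, top, right, bottom) floats so it runs without android.graphics.RectF. The two
* boxes are assumed examples.
*/
class IouSketch {
public static void main(String[] args) {
// box A: (0,0)-(10,10), box B: (5,5)-(15,15)
float interW = Math.min(10f, 15f) - Math.max(0f, 5f); // 5
float interH = Math.min(10f, 15f) - Math.max(0f, 5f); // 5
float inter = Math.max(0f, interW) * Math.max(0f, interH); // 25
float union = 10f * 10f + 10f * 10f - inter; // 175
System.out.println(inter / union); // ~0.143, below mNmsThresh = 0.6, so neither box is suppressed
}
}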
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
package com.agenew.detection.tracking;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Paint.Cap;
import android.graphics.Paint.Join;
import android.graphics.Paint.Style;
import android.graphics.RectF;
import android.text.TextUtils;
import android.util.Pair;
import android.util.TypedValue;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import com.agenew.detection.env.BorderedText;
import com.agenew.detection.env.ImageUtils;
import com.agenew.detection.env.Logger;
import com.agenew.detection.tflite.Classifier.Recognition;
/** A tracker that handles non-max suppression and matches existing objects to new detections. */
public class MultiBoxTracker {
private static final float TEXT_SIZE_DIP = 18;
private static final float MIN_SIZE = 16.0f;
private static final int[] COLORS = {
Color.BLUE,
Color.RED,
Color.GREEN,
Color.YELLOW,
Color.CYAN,
Color.MAGENTA,
Color.WHITE,
Color.parseColor("#55FF55"),
Color.parseColor("#FFA500"),
Color.parseColor("#FF8888"),
Color.parseColor("#AAAAFF"),
Color.parseColor("#FFFFAA"),
Color.parseColor("#55AAAA"),
Color.parseColor("#AA33AA"),
Color.parseColor("#0D0068")
};
final List<Pair<Float, RectF>> screenRects = new LinkedList<Pair<Float, RectF>>();
private final Logger logger = new Logger();
private final Queue<Integer> availableColors = new LinkedList<Integer>();
private final List<TrackedRecognition> trackedObjects = new LinkedList<TrackedRecognition>();
private final Paint boxPaint = new Paint();
private final float textSizePx;
private final BorderedText borderedText;
private Matrix frameToCanvasMatrix;
private int frameWidth;
private int frameHeight;
private int sensorOrientation;
public MultiBoxTracker(final Context context) {
for (final int color : COLORS) {
availableColors.add(color);
}
boxPaint.setColor(Color.RED);
boxPaint.setStyle(Style.STROKE);
boxPaint.setStrokeWidth(10.0f);
boxPaint.setStrokeCap(Cap.ROUND);
boxPaint.setStrokeJoin(Join.ROUND);
boxPaint.setStrokeMiter(100);
textSizePx =
TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, context.getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx);
}
public synchronized void setFrameConfiguration(
final int width, final int height, final int sensorOrientation) {
frameWidth = width;
frameHeight = height;
this.sensorOrientation = sensorOrientation;
}
public synchronized void drawDebug(final Canvas canvas) {
final Paint textPaint = new Paint();
textPaint.setColor(Color.WHITE);
textPaint.setTextSize(60.0f);
final Paint boxPaint = new Paint();
boxPaint.setColor(Color.RED);
boxPaint.setAlpha(200);
boxPaint.setStyle(Style.STROKE);
for (final Pair<Float, RectF> detection : screenRects) {
final RectF rect = detection.second;
canvas.drawRect(rect, boxPaint);
canvas.drawText("" + detection.first, rect.left, rect.top, textPaint);
borderedText.drawText(canvas, rect.centerX(), rect.centerY(), "" + detection.first);
}
}
public synchronized void trackResults(final List<Recognition> results, final long timestamp) {
logger.i("Processing %d results from %d", results.size(), timestamp);
processResults(results);
}
private Matrix getFrameToCanvasMatrix() {
return frameToCanvasMatrix;
}
public synchronized void draw(final Canvas canvas) {
final boolean rotated = sensorOrientation % 180 == 90;
final float multiplier =
Math.min(
canvas.getHeight() / (float) (rotated ? frameWidth : frameHeight),
canvas.getWidth() / (float) (rotated ? frameHeight : frameWidth));
frameToCanvasMatrix =
ImageUtils.getTransformationMatrix(
frameWidth,
frameHeight,
(int) (multiplier * (rotated ? frameHeight : frameWidth)),
(int) (multiplier * (rotated ? frameWidth : frameHeight)),
sensorOrientation,
false);
for (final TrackedRecognition recognition : trackedObjects) {
final RectF trackedPos = new RectF(recognition.location);
getFrameToCanvasMatrix().mapRect(trackedPos);
boxPaint.setColor(recognition.color);
float cornerSize = Math.min(trackedPos.width(), trackedPos.height()) / 8.0f;
canvas.drawRoundRect(trackedPos, cornerSize, cornerSize, boxPaint);
final String labelString =
!TextUtils.isEmpty(recognition.title)
? String.format("%s %.2f", recognition.title, (100 * recognition.detectionConfidence))
: String.format("%.2f", (100 * recognition.detectionConfidence));
// borderedText.drawText(canvas, trackedPos.left + cornerSize, trackedPos.top,
// labelString);
borderedText.drawText(
canvas, trackedPos.left + cornerSize, trackedPos.top, labelString + "%", boxPaint);
}
}
private void processResults(final List<Recognition> results) {
final List<Pair<Float, Recognition>> rectsToTrack = new LinkedList<Pair<Float, Recognition>>();
screenRects.clear();
final Matrix rgbFrameToScreen = new Matrix(getFrameToCanvasMatrix());
for (final Recognition result : results) {
if (result.getLocation() == null) {
continue;
}
final RectF detectionFrameRect = new RectF(result.getLocation());
final RectF detectionScreenRect = new RectF();
rgbFrameToScreen.mapRect(detectionScreenRect, detectionFrameRect);
logger.v(
"Result! Frame: " + result.getLocation() + " mapped to screen:" + detectionScreenRect);
screenRects.add(new Pair<Float, RectF>(result.getConfidence(), detectionScreenRect));
if (detectionFrameRect.width() < MIN_SIZE || detectionFrameRect.height() < MIN_SIZE) {
logger.w("Degenerate rectangle! " + detectionFrameRect);
continue;
}
rectsToTrack.add(new Pair<Float, Recognition>(result.getConfidence(), result));
}
trackedObjects.clear();
if (rectsToTrack.isEmpty()) {
logger.v("Nothing to track, aborting.");
return;
}
for (final Pair<Float, Recognition> potential : rectsToTrack) {
final TrackedRecognition trackedRecognition = new TrackedRecognition();
trackedRecognition.detectionConfidence = potential.first;
trackedRecognition.location = new RectF(potential.second.getLocation());
trackedRecognition.title = potential.second.getTitle();
// trackedRecognition.color = COLORS[trackedObjects.size() % COLORS.length];
trackedRecognition.color = COLORS[potential.second.getDetectedClass() % COLORS.length];
trackedObjects.add(trackedRecognition);
// if (trackedObjects.size() >= COLORS.length) {
// break;
// }
}
}
private static class TrackedRecognition {
RectF location;
float detectionConfidence;
int color;
String title;
}
}
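/*
* A usage sketch matching the calls made from the detection loop earlier in this commit:
* configure the frame geometry once, then push each batch of results and repaint the
* overlay. The 640x480 preview size and 90-degree sensor orientation are assumed.
*/
import java.util.List;
import com.agenew.detection.customview.OverlayView;
import com.agenew.detection.tflite.Classifier.Recognition;
import com.agenew.detection.tracking.MultiBoxTracker;
class TrackerSketch {
static void configure(MultiBoxTracker tracker) {
tracker.setFrameConfiguration(640, 480, 90); // preview size + sensor rotation (assumed)
}
static void push(MultiBoxTracker tracker, OverlayView overlay,
List<Recognition> results, long timestamp) {
tracker.trackResults(results, timestamp); // replaces the tracked set with new detections
overlay.postInvalidate(); // repaint; the overlay's draw callbacks call tracker.draw(canvas)
}
}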
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillType="evenOdd"
android:pathData="M32,64C32,64 38.39,52.99 44.13,50.95C51.37,48.37 70.14,49.57 70.14,49.57L108.26,87.69L108,109.01L75.97,107.97L32,64Z"
android:strokeColor="#00000000"
android:strokeWidth="1">
<aapt:attr name="android:fillColor">
<gradient
android:endX="78.5885"
android:endY="90.9159"
android:startX="48.7653"
android:startY="61.0927"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0"/>
<item
android:color="#00000000"
android:offset="1.0"/>
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M66.94,46.02L66.94,46.02C72.44,50.07 76,56.61 76,64L32,64C32,56.61 35.56,50.11 40.98,46.06L36.18,41.19C35.45,40.45 35.45,39.3 36.18,38.56C36.91,37.81 38.05,37.81 38.78,38.56L44.25,44.05C47.18,42.57 50.48,41.71 54,41.71C57.48,41.71 60.78,42.57 63.68,44.05L69.11,38.56C69.84,37.81 70.98,37.81 71.71,38.56C72.44,39.3 72.44,40.45 71.71,41.19L66.94,46.02ZM62.94,56.92C64.08,56.92 65,56.01 65,54.88C65,53.76 64.08,52.85 62.94,52.85C61.8,52.85 60.88,53.76 60.88,54.88C60.88,56.01 61.8,56.92 62.94,56.92ZM45.06,56.92C46.2,56.92 47.13,56.01 47.13,54.88C47.13,53.76 46.2,52.85 45.06,52.85C43.92,52.85 43,53.76 43,54.88C43,56.01 43.92,56.92 45.06,56.92Z"
android:strokeColor="#00000000"
android:strokeWidth="1"/>
</vector>
<?xml version="1.0" encoding="utf-8"?>
<selector xmlns:android="http://schemas.android.com/apk/res/android">
<!-- pressed -->
<item android:drawable="@color/selection_highlight" android:state_pressed="true" />
<!-- focused -->
<item android:drawable="@color/selection_focus" android:state_activated="true" />
<!-- default -->
<item android:drawable="@color/item_normal" />
</selector>
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android"
android:shape="rectangle">
<corners
android:topLeftRadius="@dimen/tfe_bottom_sheet_corner_radius"
android:topRightRadius="@dimen/tfe_bottom_sheet_corner_radius" />
<padding android:top="@dimen/tfe_bottom_sheet_top_padding" />
<solid android:color="@android:color/white" />
</shape>
\ No newline at end of file
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="24dp"
android:height="24dp"
android:viewportWidth="24"
android:viewportHeight="24">
<path
android:fillColor="#FF000000"
android:pathData="M19,13h-6v6h-2v-6H5v-2h6V5h2v6h6v2z"/>
</vector>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="24dp"
android:height="24dp"
android:viewportWidth="24"
android:viewportHeight="24">
<path
android:fillColor="#FF000000"
android:pathData="M19,13H5v-2h14v2z"/>
</vector>
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportHeight="108"
android:viewportWidth="108">
<path
android:fillColor="#26A69A"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeColor="#33FFFFFF"
android:strokeWidth="0.8" />
</vector>
<?xml version="1.0" encoding="UTF-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/listview_background_shape">
<stroke
android:width="1dp"
android:color="@android:color/darker_gray" />
<padding
android:bottom="2dp"
android:left="2dp"
android:right="2dp"
android:top="2dp" />
<solid android:color="#ffffffff" />
</shape>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<TextView
android:id="@+id/deviceview_row_text"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_marginRight="2dp"
android:padding="10dp"
android:textSize="18sp"
android:background="@drawable/item_selector"
android:textStyle="bold" />
</LinearLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<TextView
android:id="@+id/listview_row_text"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_marginRight="2dp"
android:padding="10dp"
android:textSize="18sp"
android:background="@drawable/item_selector"
android:textStyle="bold" />
</LinearLayout>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<androidx.coordinatorlayout.widget.CoordinatorLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#00000000">
    <RelativeLayout
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:background="@android:color/black"
        android:orientation="vertical">
        <FrameLayout
            android:id="@+id/container"
            android:layout_width="match_parent"
            android:layout_height="match_parent"
            tools:context="com.agenew.detection.MainActivity" />
</RelativeLayout>
<include
android:id="@+id/bottom_sheet_layout"
layout="@layout/tfe_od_layout_bottom_sheet" />
</androidx.coordinatorlayout.widget.CoordinatorLayout>
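The @id/container FrameLayout above is left empty in XML and is filled at runtime with the camera preview fragment. A minimal sketch of that transaction, assuming a platform Fragment as in the upstream TensorFlow demo this layout derives from (the fragment instance is a placeholder, roughly what the upstream CameraActivity#setFragment() does):

import android.app.Activity;
import android.app.Fragment;

final class ContainerSketch {
    // Swaps the camera fragment into the empty @id/container so the
    // preview and overlay defined in the fragment layout appear under
    // the bottom sheet included above.
    static void attachCamera(Activity activity, Fragment cameraFragment) {
        activity.getFragmentManager()
                .beginTransaction()
                .replace(R.id.container, cameraFragment)
                .commit();
    }
}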
<?xml version="1.0" encoding="utf-8"?><!--
Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.agenew.detection.customview.AutoFitTextureView
android:id="@+id/texture"
android:layout_width="wrap_content"
android:layout_height="wrap_content" />
<com.agenew.detection.customview.OverlayView
android:id="@+id/tracking_overlay"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</FrameLayout>
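Here AutoFitTextureView carries the camera preview while OverlayView is a transparent layer that repaints tracked detection boxes over it on every frame. A sketch of the hookup, assuming this fork keeps the addCallback(DrawCallback) hook that the upstream TensorFlow OverlayView exposes (the box drawn here is a placeholder):

import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import com.agenew.detection.customview.OverlayView;

final class OverlaySketch {
    // OverlayView.onDraw() walks its registered callbacks, so anything
    // drawn here is composited over the live camera preview.
    static void install(OverlayView overlay) {
        final Paint boxPaint = new Paint();
        boxPaint.setColor(Color.RED);
        boxPaint.setStyle(Paint.Style.STROKE);
        boxPaint.setStrokeWidth(8.0f);
        overlay.addCallback(new OverlayView.DrawCallback() {
            @Override
            public void drawCallback(final Canvas canvas) {
                canvas.drawRect(100f, 100f, 400f, 400f, boxPaint);
            }
        });
    }
}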
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:id="@+id/bottom_sheet_layout"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_gravity="center_horizontal"
android:background="@drawable/bottom_sheet_bg"
android:gravity="center_horizontal"
android:orientation="vertical"
android:padding="8dp"
app:behavior_hideable="true"
app:layout_behavior="com.google.android.material.bottomsheet.BottomSheetBehavior">
<LinearLayout
android:id="@+id/gesture_layout"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="vertical"
android:paddingTop="10dp"
android:paddingBottom="20dp">
<ImageView
android:id="@+id/bottom_sheet_arrow"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_gravity="center"
android:src="@drawable/icn_chevron_up" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<TextView
android:id="@+id/frame"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:text="Frame"
android:textColor="@android:color/black" />
<TextView
android:id="@+id/frame_info"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:gravity="right"
android:text="640*480"
android:textColor="@android:color/black" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<TextView
android:id="@+id/crop"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:text="Crop"
android:textColor="@android:color/black" />
<TextView
android:id="@+id/crop_info"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:gravity="right"
android:text="640*480"
android:textColor="@android:color/black" />
</LinearLayout>
<LinearLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal">
<TextView
android:id="@+id/inference"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:text="Inference Time"
android:textColor="@android:color/black" />
<TextView
android:id="@+id/inference_info"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:gravity="right"
android:text="640*480"
android:textColor="@android:color/black" />
</LinearLayout>
<View
android:layout_width="match_parent"
android:layout_height="1px"
android:layout_marginTop="10dp"
android:background="@android:color/darker_gray" />
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:orientation="horizontal"
android:visibility="visible">
<TextView
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginTop="10dp"
android:text="Threads"
android:textColor="@android:color/black" />
<LinearLayout
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentRight="true"
android:background="@drawable/rectangle"
android:gravity="center"
android:orientation="horizontal"
android:padding="4dp">
<ImageView
android:id="@+id/minus"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="@drawable/ic_baseline_remove" />
<TextView
android:id="@+id/threads"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_marginLeft="10dp"
android:layout_marginRight="10dp"
android:text="1"
android:textColor="@android:color/black"
android:textSize="14sp" />
<ImageView
android:id="@+id/plus"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:src="@drawable/ic_baseline_add" />
</LinearLayout>
</RelativeLayout>
<View
android:layout_width="match_parent"
android:layout_height="1px"
android:layout_marginTop="10dp"
android:background="@android:color/darker_gray" />
<!-- TODO(b/112541284): Unhide when new NNAPI delegate is used. -->
<LinearLayout
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="horizontal">
<ListView
android:id="@+id/device_list"
android:layout_width="100dp"
android:layout_height="wrap_content" />
<ListView
android:id="@+id/model_list"
android:layout_width="match_parent"
android:layout_height="wrap_content" />
</LinearLayout>
<View
android:layout_width="match_parent"
android:layout_height="1px"
android:layout_marginTop="10dp"
android:background="@android:color/darker_gray" />
</LinearLayout>
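This sheet is collapsed and expanded through the Material BottomSheetBehavior named in app:layout_behavior, and the plus/minus ImageViews step the interpreter thread count shown in @id/threads. A minimal wiring sketch in Java (view IDs are from the layout above; the 1..9 clamp is an assumption, not taken from this commit):

import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import com.google.android.material.bottomsheet.BottomSheetBehavior;

final class SheetSketch {
    static void wire(View root) {
        // from() works because the layout declares BottomSheetBehavior as
        // its app:layout_behavior inside a CoordinatorLayout.
        View sheet = root.findViewById(R.id.bottom_sheet_layout);
        BottomSheetBehavior<View> behavior = BottomSheetBehavior.from(sheet);
        behavior.setState(BottomSheetBehavior.STATE_COLLAPSED);

        final TextView threads = root.findViewById(R.id.threads);
        ImageView plus = root.findViewById(R.id.plus);
        ImageView minus = root.findViewById(R.id.minus);
        plus.setOnClickListener(v -> {
            int n = Integer.parseInt(threads.getText().toString());
            if (n < 9) threads.setText(String.valueOf(n + 1)); // upper bound assumed
        });
        minus.setOnClickListener(v -> {
            int n = Integer.parseInt(threads.getText().toString());
            if (n > 1) threads.setText(String.valueOf(n - 1));
        });
    }
}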
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@android:color/white"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@android:color/white"/>
<foreground android:drawable="@mipmap/ic_launcher_foreground"/>
</adaptive-icon>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="tfe_color_primary">#ffa800</color>
<color name="tfe_color_primary_dark">#ff6f00</color>
<color name="tfe_color_accent">#425066</color>
<color name="tfe_semi_transparent">#66000000</color>
<color name="control_background">#cc4285f4</color>
<color name="selection_highlight">#aaaaaa</color>
<color name="selection_focus">#eeaa55</color>
<color name="item_normal">#eeeeee</color>
</resources>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<dimen name="tfe_bottom_sheet_corner_radius">15dp</dimen>
<dimen name="tfe_bottom_sheet_top_padding">8dp</dimen>
</resources>
\ No newline at end of file
<resources>
<string name="tfe_od_app_name" translation_description="Object Detection demo app [CHAR_LIMIT=40]">YOLOv5_Android_TFLite</string>
<string name="tfe_od_camera_error" translation_description="Error regarding camera support[CHAR_LIMIT=40]">This device doesn\'t support Camera2 API.</string>
</resources>
<resources>
<!-- Base application theme. -->
<style name="AppTheme.ObjectDetection" parent="Theme.AppCompat.Light.NoActionBar">
<!-- Customize your theme here. -->
<item name="colorPrimary">@color/tfe_color_primary</item>
<item name="colorPrimaryDark">@color/tfe_color_primary_dark</item>
<item name="colorAccent">@color/tfe_color_accent</item>
</style>
</resources>
// Top-level build file where you can add configuration options common to all sub-projects/modules.
plugins {
alias(libs.plugins.androidApplication) apply false
}
\ No newline at end of file
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. For more details, visit
# https://developer.android.com/r/tools/gradle-multi-project-decoupled-projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Enables namespacing of each library's R class so that its R class includes only the
# resources declared in the library itself and none from the library's dependencies,
# thereby reducing the size of the R class for that library
android.nonTransitiveRClass=true
\ No newline at end of file
[versions]
agp = "8.3.2"
junit = "4.13.2"
junitVersion = "1.1.5"
espressoCore = "3.5.1"
appcompat = "1.6.1"
material = "1.11.0"
[libraries]
junit = { group = "junit", name = "junit", version.ref = "junit" }
ext-junit = { group = "androidx.test.ext", name = "junit", version.ref = "junitVersion" }
espresso-core = { group = "androidx.test.espresso", name = "espresso-core", version.ref = "espressoCore" }
appcompat = { group = "androidx.appcompat", name = "appcompat", version.ref = "appcompat" }
material = { group = "com.google.android.material", name = "material", version.ref = "material" }
[plugins]
androidApplication = { id = "com.android.application", version.ref = "agp" }
#Wed Apr 10 09:00:02 CST 2024
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
#!/usr/bin/env sh
#
# Copyright 2015 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn () {
echo "$*"
}
die () {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
NONSTOP* )
nonstop=true
;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin or MSYS, switch paths to Windows format before running java
if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=`expr $i + 1`
done
case $i in
0) set -- ;;
1) set -- "$args0" ;;
2) set -- "$args0" "$args1" ;;
3) set -- "$args0" "$args1" "$args2" ;;
4) set -- "$args0" "$args1" "$args2" "$args3" ;;
5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Escape application args
save () {
for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
echo " "
}
APP_ARGS=`save "$@"`
# Collect all arguments for the java command, following the shell quoting and substitution rules
eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
exec "$JAVACMD" "$@"
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
pluginManagement {
repositories {
google {
content {
includeGroupByRegex("com\\.android.*")
includeGroupByRegex("com\\.google.*")
includeGroupByRegex("androidx.*")
}
}
mavenCentral()
gradlePluginPortal()
}
}
dependencyResolutionManagement {
repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
repositories {
google()
mavenCentral()
}
}
rootProject.name = "yolov5_android_tflite"
include ':app'