If the requirement is to run only a single model for detections, then this approach can also be used. But we still recommend the approach described in Basic Usage.
-
In a real application, you will use a camera or another real-time image stream instead of static resources.
LSDCamera2Controller
encapsulates the configuration and opening of the device's camera via the Camera2 API. Additionally LSDCamera2Preview
is available for displaying a camera preview.
Note
Your application is responsible for ensuring that camera runtime permission has been granted before attempting to use LSDCamera2Controller
.
4.1. Basic setup of LSDCamera2Controller
and LSDCamera2Preview
:
/**
 * Example Activity showing the minimal wiring for [LSDCamera2Controller]
 * and [LSDCamera2Preview]: configure and initialize in onCreate, start the
 * stream in onResume (only once camera permission is granted), and stop it
 * in onPause.
 */
class CameraActivity : AppCompatActivity() {

    private lateinit var cameraController: LSDCamera2Controller

    // Kept as a field so the WeakReference handed to the controller does not
    // let the listener be garbage-collected while the Activity is alive.
    private val imageListener = object : LSDCamera2ImageListener {
        override fun onImageAvailable(image: Image?) {
            // Frame handling goes here.
        }
    }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_camera)

        // Request a 1280x960 stream from the camera.
        val config = LSDCamera2Config().apply {
            this.targetStreamResolution = Size(1280, 960)
        }
        cameraController = LSDCamera2Controller(applicationContext)
        cameraController.initialize(config)
        cameraController.setImageListener(WeakReference(imageListener))

        // Attach the preview surface to the controller.
        findViewById<LSDCamera2Preview>(R.id.camera_preview).initialize(cameraController)
    }

    override fun onResume() {
        super.onResume()
        // The application is responsible for the CAMERA runtime permission
        // before starting the controller.
        if (checkSelfPermission(Manifest.permission.CAMERA) == PERMISSION_GRANTED) {
            cameraController.start()
        }
    }

    override fun onPause() {
        super.onPause()
        cameraController.stop()
    }
}
4.2. Layout of CameraActivity
:
```xml
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<!-- Full-screen camera preview surface; bound to LSDCamera2Controller in CameraActivity -->
<com.here.see.livesense.ar_lib.camera2.LSDCamera2Preview
android:id="@+id/camera_preview"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</FrameLayout>
```
-
Prepare the data to be passed to the Live Sense SDK for recognition:
/**
 * Bridges camera frames from an {@link ImageReader} into the Live Sense
 * {@code RoadBasicsModel}.
 *
 * The model is created and executed on a single dedicated HandlerThread,
 * because Live Sense models are not thread-safe and must be initialized and
 * run on the same thread. Frames that arrive while a previous frame is still
 * being processed are dropped.
 */
public class RecognitionFromCamera implements ImageReader.OnImageAvailableListener {
    private static final String TAG = "RoadBasics";

    // Dedicated thread for both model initialization and inference.
    private HandlerThread detectionThread;
    private Handler detectionHandler;
    private RoadBasicsModel roadBasicsModel;
    // Gate that drops frames while a previous frame is still being processed;
    // a model instance can only handle one image at a time.
    private volatile boolean isProcessingFrame = false;
    private volatile boolean isModelLoaded = false;
    private int previewWidth;
    private int previewHeight;
    private int sensorOrientation;
    private int rotation;
    // Scratch buffers reused across frames to avoid per-frame allocation.
    private byte[][] yuvBytes = new byte[3][];
    private int[] rgbBytes;
    private Bitmap bitmap;

    /**
     * Starts the detection thread and asynchronously loads the model and the
     * conversion buffers sized for the given preview resolution.
     *
     * @param previewWidth      width of incoming frames in pixels
     * @param previewHeight     height of incoming frames in pixels
     * @param sensorOrientation camera sensor orientation in degrees
     */
    public void init(int previewWidth, int previewHeight, int sensorOrientation) {
        this.previewWidth = previewWidth;
        this.previewHeight = previewHeight;
        this.sensorOrientation = sensorOrientation;
        detectionThread = new HandlerThread("detection");
        detectionThread.start();
        detectionHandler = new Handler(detectionThread.getLooper());
        detectionHandler.post(() -> {
            try {
                RoadBasicsModel.Options rbOptions = new RoadBasicsModel.Options();
                rbOptions.setEnableTrafficLightStatus(true);
                roadBasicsModel = new RoadBasicsModel(rbOptions);
                roadBasicsModel.addClassMinConfidence("pedestrian", 0.45f);
                isModelLoaded = true;
            } catch (IOException | AuthorizationException e) {
                // Do not swallow the failure silently: frames will be dropped
                // for as long as isModelLoaded stays false, so make the cause
                // visible.
                Log.e(TAG, "Failed to load RoadBasicsModel", e);
            }
            rgbBytes = new int[previewWidth * previewHeight];
            bitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
        });
    }

    /** Stops accepting frames, shuts down the detection thread, and releases the model. */
    public void close() {
        // Clear the flag first so onImageAvailable stops queueing work.
        isModelLoaded = false;
        if (detectionThread != null) {
            detectionThread.quitSafely();
            detectionThread = null;
            detectionHandler = null;
        }
        if (roadBasicsModel != null) {
            roadBasicsModel.close();
            roadBasicsModel = null;
        }
    }

    /**
     * Derives the rotation to apply to frames from the sensor orientation and
     * the current device rotation (both in degrees).
     */
    public void setDeviceRotation(int deviceRotation) {
        this.rotation = (this.sensorOrientation - deviceRotation + 360) % 360;
    }

    @Override
    public void onImageAvailable(final ImageReader reader) {
        final Image image = reader.acquireLatestImage();
        if (image == null) {
            return;
        }
        // Close the Image on every early-exit path; leaking it starves the
        // ImageReader of buffers and stalls the camera stream. (The original
        // code leaked the image when the model was not yet loaded.)
        if (!isModelLoaded || isProcessingFrame) {
            image.close();
            return;
        }
        isProcessingFrame = true;
        final Image.Plane[] planes = image.getPlanes();
        final int yRowStride = planes[0].getRowStride();
        final int uvRowStride = planes[1].getRowStride();
        final int uvPixelStride = planes[1].getPixelStride();
        // Copy plane data out so the Image can be released immediately.
        for (int i = 0; i < planes.length; ++i) {
            final ByteBuffer buffer = planes[i].getBuffer();
            if (yuvBytes[i] == null) {
                yuvBytes[i] = new byte[buffer.capacity()];
            }
            buffer.get(yuvBytes[i]);
        }
        image.close();
        detectionHandler.post(() -> {
            try {
                ImageUtils.convertYUV420ToARGB8888(
                        yuvBytes[0],
                        yuvBytes[1],
                        yuvBytes[2],
                        previewWidth,
                        previewHeight,
                        yRowStride,
                        uvRowStride,
                        uvPixelStride,
                        rgbBytes);
                ImageUtils.argb8888ToBitmap(rgbBytes, previewWidth, previewHeight, bitmap);
                List<Recognition> recognitions = roadBasicsModel.recognizeImage(bitmap, rotation, 0.6f);
                for (Recognition recognition : recognitions) {
                    Log.d(TAG, recognition.getTitle() + " at "
                            + recognition.getLocation() + " with confidence " + recognition.getConfidence());
                }
            } finally {
                // Always release the gate, even if conversion or inference
                // throws; otherwise detection would be wedged forever.
                isProcessingFrame = false;
            }
        });
    }
}
-
Helper methods to convert raw image data into a bitmap.
ImageUtils.argb8888ToBitmap(int[] argb, int width, int height);
ImageUtils.argb8888ToBitmap(int[] argb, int width, int height, Bitmap output);
ImageUtils.argb8888ToBitmap(byte[] argb, int width, int height);
ImageUtils.argb8888ToBitmap(byte[] argb, Bitmap output);
ImageUtils.convertYUV420ToARGB8888(
byte[] yData,
byte[] uData,
byte[] vData,
int width,
int height,
int yRowStride,
int uvRowStride,
int uvPixelStride,
int[] out)
ImageUtils.convertYUV420SPToARGB8888(
byte[] input,
int width,
int height,
int[] output)
-
Run a detection call:
/**
 * Runs a single recognition pass over a static drawable resource.
 * Rotation 0 is passed because the decoded bitmap is already upright;
 * 0.6f is the minimum confidence threshold for returned detections.
 */
public void runDetection() {
    final Bitmap testFrame = BitmapFactory.decodeResource(this.getResources(), R.drawable.test_image);
    final List<Recognition> results = roadBasicsModel.recognizeImage(testFrame, 0, 0.6f);
}
-
It is recommended to call the LiveSenseEngine.getInstance().onResume()
method when the application returns to the foreground, to resume the services that were paused while in the background. Call the LiveSenseEngine.getInstance().onPause()
method when the application goes to the background; this stops the services, which saves device memory and battery usage.
@Override
protected void onPause() {
super.onPause();
// Pause Live Sense services while in the background to save memory and battery.
LiveSenseEngine.getInstance().onPause();
}
@Override
protected void onResume() {
super.onResume();
// Resume the Live Sense services that were paused while in the background.
LiveSenseEngine.getInstance().onResume();
}
-
Release the resources when exiting the application.
@Override
protected void onDestroy() {
    super.onDestroy();
    // Release model resources. Guard against the model never having been
    // created (e.g. initialization failed or onDestroy ran before loading),
    // which would otherwise throw a NullPointerException here.
    if (roadBasicsModel != null) {
        roadBasicsModel.close();
        roadBasicsModel = null;
    }
}
The Live Sense models are not thread-safe and should be both initialized and executed on the same thread. Utilizing multiple threads for the same model instance may result in unexpected behavior.
Models may be executed in parallel, but each model instance can only handle one image at a time. Executing a model before the previous call has completed will result in an exception.