The most basic use of Live Sense SDK includes the detection of cars, pedestrians, signs, and other supported objects in a still image. For details on what can be detected by each model, see Models.
Live Sense SDK can run either a single model or multiple models at once. To do so, follow the steps below:
-
Create an Application
class and initialize LiveSenseEngine
inside:
// Obtain the SDK engine singleton and initialize it with the application
// context (rather than an Activity context, which is shorter-lived).
LiveSenseEngine liveSenseEngine = LiveSenseEngine.getInstance();
liveSenseEngine.initialize(this.getApplicationContext());
-
In your Activity
class, ask the end-user for consent. This is required only for non-self-serve users.
public class CameraActivity extends Activity {
    private LSModelManagerParallel modelManager;
    private ManagerListener recognitionListener;
    private LSTrackerManagerImpl trackerManager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Request end-user consent before doing any detection work.
        // NOTE(review): initModels() runs for either answer here; presumably
        // consent gates only data collection, not detection — confirm with
        // the SDK documentation.
        LiveSenseEngine.getInstance().requestConsent(this, new LiveSenseEngine.LSDConsentCallback() {
            @Override
            public void onAccept() {
                initModels();
            }

            @Override
            public void onRefuse() {
                initModels();
            }
        });
    }
}
-
Upon receiving a consent response, initialize the model manager LSModelManagerParallel
and tracker manager LSTrackerManagerImpl
.
LSModelManagerParallel
will take care of running all the required models in parallel. LSTrackerManagerImpl
is required to track the recognitions.
Note
- You can also write your own implementation of the model manager class that must implement
LSModelManager
. - You can also write your own implementation of the tracker manager class that must implement
LSTrackerManager
.
/**
 * Initializes the recognition listener, model manager, and tracker manager,
 * then loads the Road Basics model.
 *
 * Fixes over the previous revision:
 * - recognitionListener is created BEFORE being handed to the
 *   LSModelManagerParallel constructor (it was previously still null there).
 * - onError logged an undefined variable "i"; it now logs the modelId.
 */
public void initModels() {
    // One-element holder so the anonymous listener can read the model id that
    // is only assigned after the manager exists (anonymous classes may only
    // capture effectively-final locals).
    final int[] roadBasicsModelId = new int[1];
    recognitionListener = new ManagerListener() {
        @Override
        public void onError(int modelId, Throwable throwable) {
            LOGGER.e("Error in inference with modelId: " + modelId + ". \n" + throwable.getMessage());
        }

        @Override
        public void onRecognitions(int modelId, int imageId, List<Recognition> list, long runTime) {
            // Tag the log output by the model that produced the recognitions.
            String tag = "";
            if (modelId == roadBasicsModelId[0]) {
                tag = "RoadBasics";
            }
            for (Recognition recognition : list) {
                Log.d(tag, recognition.getTitle() + " at "
                        + recognition.getLocation() + " with confidence " + recognition.getConfidence());
            }
            // Tracked recognitions as maintained by the tracker manager.
            List<TrackedRecognition> trackedRecognitions = trackerManager.getTrackedObjects();
        }
    };
    modelManager = new LSModelManagerParallel(recognitionListener);
    trackerManager = new LSTrackerManagerImpl();
    modelManager.setTrackerManager(trackerManager);
    // Road Basics with a 0.6 global minimum confidence, then a lower
    // per-class threshold for pedestrians.
    roadBasicsModelId[0] = modelManager.addModel(LiveSenseModel.ROAD_BASICS, new RoadBasicsModel.Options(), 0.6f);
    modelManager.addClassMinConfidence(LSClassLabel.PEDESTRIAN, 0.40f);
    // Reload the model with traffic-light status detection enabled.
    RoadBasicsModel.Options rbOptions = new RoadBasicsModel.Options();
    rbOptions.setEnableTrafficLightStatus(true);
    modelManager.reloadModel(roadBasicsModelId[0], rbOptions);
}
-
Add required models to modelManager
and prepare data to be passed for recognitions to Live Sense SDK.
public class CameraActivity extends Activity {
private volatile boolean isModelLoaded = false;
private int sensorOrientation;
private int rotation;
private LSModelManager modelManager;
private ManagerListener recognitionListener;
private LSTrackerManager trackerManager;
private int roadBasicsModelId;
private static final String TAG = "CameraActivity";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_camera);
initModels();
}
public void initModels() {
modelManager = new LSModelManagerParallel(recognitionListener);
trackerManager = new LSTrackerManagerImpl();
modelManager.setTrackerManager(trackerManager);
roadBasicsModelId = modelManager.addModel(LiveSenseModel.ROAD_BASICS, new RoadBasicsModel.Options(), 0.6f);
modelManager.addClassMinConfidence(LSClassLabel.PEDESTRIAN, 0.40f);
RoadBasicsModel.Options rbOptions = new RoadBasicsModel.Options();
rbOptions.setEnableTrafficLightStatus(true);
modelManager.reloadModel(roadBasicsModelId, rbOptions);
isModelLoaded = true;
}
public void close() {
if (modelManager != null) {
modelManager.close();
modelManager = null;
}
if (trackerManager != null) {
trackerManager.close();
trackerManager = null;
}
isModelLoaded = false;
}
public void setDeviceRotation(int deviceRotation) {
this.rotation = (this.sensorOrientation - deviceRotation + 360) % 360;
}
private LSModelManager.ManagerListener recognitionListener = new LSModelManager.ManagerListener() {
@Override
public void onError(int modelId, Throwable throwable) {
Log.e(TAG, "Error in inference with modelId: " + modelId +". \n"+ throwable.getMessage());
}
@Override
public void onRecognitions(int modelId, int imageId, List<Recognition> list, long runTime) {
List<Recognition> recognitions = list;
String tag = "";
if (modelId == roadBasicsModelId) {
tag = "RoadBasics";
}
for (Recognition recognition : recognitions) {
Log.d(tag, recognition.getTitle() + " at "
+ recognition.getLocation() + " with confidence " + recognition.getConfidence());
}
List<TrackedRecognition> trackedRecognitions = trackerManager.getTrackedObjects();
}
};
private final LSDCamera2ImageListener imageAvailableCallback = (Image image) -> {
if (image == null || !isModelLoaded) {
Log.w(TAG, "Manager not initialized.");
return;
}
if (image.getFormat() != ImageFormat.YUV_420_888) {
Log.w(TAG, "Unsupported image format.");
image.close();
return;
}
modelManager.offerImage(image, rotation);
};
}
-
To build applications, use a camera or another real-time image source rather than static resources.
LSDCamera2Controller
encapsulates the configuration and opening of the device's camera via the Camera2 API. Additionally LSDCamera2Preview
is available for displaying a camera preview.
Note
Your application is responsible for ensuring that camera runtime permission has been granted before attempting to use LSDCamera2Controller
.
5.1. Basic setup of LSDCamera2Controller
and LSDCamera2Preview
:
class CameraActivity : AppCompatActivity() {

    private lateinit var cameraController: LSDCamera2Controller

    // Receives camera frames; forward them to the model manager from here.
    private val imageListener = object : LSDCamera2ImageListener {
        override fun onImageAvailable(image: Image?) {
            // Intentionally empty in this minimal example.
        }
    }

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_camera)

        // Configure the camera stream before opening it.
        val config = LSDCamera2Config()
        config.targetStreamResolution = Size(1280, 960)

        cameraController = LSDCamera2Controller(applicationContext)
        cameraController.initialize(config)
        // WeakReference so the controller does not keep the listener alive.
        cameraController.setImageListener(WeakReference(imageListener))

        // Bind the on-screen preview to the same controller.
        findViewById<LSDCamera2Preview>(R.id.camera_preview).initialize(cameraController)
    }

    override fun onResume() {
        super.onResume()
        // The app must have obtained the camera permission before start().
        if (checkSelfPermission(Manifest.permission.CAMERA) == PERMISSION_GRANTED) {
            cameraController.start()
        }
    }

    override fun onPause() {
        super.onPause()
        cameraController.stop()
    }
}
5.2. Layout of CameraActivity
:
<?xml version="1.0" encoding="utf-8"?>
<!-- Full-screen camera preview layout used by CameraActivity. -->
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<com.here.see.livesense.ar_lib.camera2.LSDCamera2Preview
android:id="@+id/camera_preview"
android:layout_width="match_parent"
android:layout_height="match_parent" />
</FrameLayout>
-
Helper methods to convert raw data into bitmap.
// Build a Bitmap from a packed ARGB_8888 int[] pixel buffer.
ImageUtils.argb8888ToBitmap(int[] argb, int width, int height);
// Same, but reuses a caller-supplied Bitmap to avoid reallocation.
ImageUtils.argb8888ToBitmap(int[] argb, int width, int height, Bitmap output);
// byte[] variants of the conversions above.
ImageUtils.argb8888ToBitmap(byte[] argb, int width, int height);
ImageUtils.argb8888ToBitmap(byte[] argb, Bitmap output);
// Convert planar YUV 420 data (separate Y/U/V planes with their row and
// pixel strides) into a packed ARGB_8888 int[] buffer.
ImageUtils.convertYUV420ToARGB8888(
byte[] yData,
byte[] uData,
byte[] vData,
int width,
int height,
int yRowStride,
int uvRowStride,
int uvPixelStride,
int[] out)
// Convert semi-planar (interleaved UV) YUV 420 data into ARGB_8888.
ImageUtils.convertYUV420SPToARGB8888(
byte[] input,
int width,
int height,
int[] output)
-
Test a detection call using a static image:
// Runs a one-off detection pass against a bundled drawable, for testing only.
public void runDetection() {
    // Decode without density scaling so detection sees the raw pixels.
    final BitmapFactory.Options decodeOptions = new BitmapFactory.Options();
    decodeOptions.inScaled = false;
    final Bitmap testFrame =
            BitmapFactory.decodeResource(this.getResources(), R.drawable.test_image, decodeOptions);
    // Rotation 0: the static resource is assumed to be upright already.
    modelManager.offerImage(testFrame, 0);
    // NOTE(review): inference may be asynchronous; tracked objects might not
    // be populated immediately after offerImage — confirm with the SDK docs.
    List<TrackedRecognition> trackedRecognitions = trackerManager.getTrackedObjects();
}
-
It is recommended to call the following SDK lifecycle methods from the application activity lifecycle:
LiveSenseEngine.getInstance().onResume()
-
LiveSenseEngine.getInstance().onPause()
It helps to manage the SDK resources which saves the device memory and battery usage.
@Override
protected void onPause() {
super.onPause();
// Suspend SDK work while the Activity is in the background, saving
// memory and battery.
LiveSenseEngine.getInstance().onPause();
}
@Override
protected void onResume() {
super.onResume();
// Resume SDK work when the Activity returns to the foreground.
LiveSenseEngine.getInstance().onResume();
}
-
Release the resources when exiting the application.
@Override
protected void onDestroy() {
    super.onDestroy();
    // Release SDK resources. Null guards make this safe if close() already
    // ran earlier in the lifecycle; the tracker manager is released too, for
    // consistency with the close() pattern shown above in this guide.
    if (modelManager != null) {
        modelManager.close();
        modelManager = null;
    }
    if (trackerManager != null) {
        trackerManager.close();
        trackerManager = null;
    }
}