How to Implement Android QR Scanner with NDK Camera2 API

Pre-requisites

About the C++ API of Dynamsoft Android SDK

Getting Started with Android NDK Camera2 Samples

  • Basic: creates a native activity that draws camera frames on the native window.
  • Texture-view: draws camera frames on a TextureView.

How to Implement Camera Preview in Native Activity

  1. Create an Android Activity that extends NativeActivity in Java:

public class CameraActivity extends NativeActivity {}

  2. Implement android_main() in C++ as the entry point of the native activity. It registers a command handler, then polls the Android event loop and draws a camera frame on each iteration:
extern "C" void android_main(struct android_app* state) {
CameraEngine engine(state);
pEngineObj = &engine;

state->userData = reinterpret_cast<void*>(&engine);
state->onAppCmd = ProcessAndroidCmd;

// loop waiting for stuff to do.
while (1) {
// Read all pending events.
int events;
struct android_poll_source* source;

while (ALooper_pollAll(0, NULL, &events, (void**)&source) >= 0) {
// Process this event.
if (source != NULL) {
source->process(state, source);
}

// Check if we are exiting.
if (state->destroyRequested != 0) {
LOGI("CameraEngine thread destroy requested!");
engine.DeleteCamera();
pEngineObj = nullptr;
return;
}
}
pEngineObj->DrawFrame();
}
}
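ProcessAndroidCmd is where the camera is created and torn down in response to window lifecycle events. A minimal sketch is shown below; apart from DeleteCamera(), which appears in the loop above, the member functions here are assumptions, not the sample's exact code:

static void ProcessAndroidCmd(struct android_app* app, int32_t cmd) {
  // Hypothetical sketch: react to lifecycle commands from the native app glue.
  CameraEngine* engine = reinterpret_cast<CameraEngine*>(app->userData);
  switch (cmd) {
    case APP_CMD_INIT_WINDOW:
      if (app->window != nullptr) {
        engine->CreateCamera();   // assumed helper: open the camera once the window exists
      }
      break;
    case APP_CMD_TERM_WINDOW:
      engine->DeleteCamera();     // release the camera when the window goes away
      break;
    default:
      break;
  }
}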
DrawFrame() grabs the latest image from the YUV image reader, locks the native window buffer, and renders the frame into it:

void CameraEngine::DrawFrame(void) {
  if (!cameraReady_ || !yuvReader_) return;
  AImage* image = yuvReader_->GetNextImage();
  if (!image) {
    return;
  }

  ANativeWindow_acquire(app_->window);
  ANativeWindow_Buffer buf;
  if (ANativeWindow_lock(app_->window, &buf, nullptr) < 0) {
    yuvReader_->DeleteImage(image);
    return;
  }

  yuvReader_->DisplayImage(&buf, image);
  ANativeWindow_unlockAndPost(app_->window);
  ANativeWindow_release(app_->window);
}
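DisplayImage() is where the YUV_420_888 planes are converted into the pixels of the locked window buffer. The sample's own implementation also handles rotation and cropping; the sketch below shows only the core BT.601 conversion, assuming the buffer format is WINDOW_FORMAT_RGBA_8888 (this is not the sample's exact code):

#include <media/NdkImage.h>
#include <android/native_window.h>
#include <algorithm>
#include <cstdint>

// Hypothetical sketch: convert a YUV_420_888 AImage into the locked RGBA buffer.
// Rotation, mirroring and aspect-ratio handling are omitted for brevity.
void ImageReader::DisplayImage(ANativeWindow_Buffer* buf, AImage* image) {
  int32_t srcW = 0, srcH = 0;
  AImage_getWidth(image, &srcW);
  AImage_getHeight(image, &srcH);

  uint8_t *yPlane, *uPlane, *vPlane;
  int yLen, uLen, vLen;
  int32_t yRowStride, uvRowStride, uvPixelStride;
  AImage_getPlaneData(image, 0, &yPlane, &yLen);
  AImage_getPlaneData(image, 1, &uPlane, &uLen);
  AImage_getPlaneData(image, 2, &vPlane, &vLen);
  AImage_getPlaneRowStride(image, 0, &yRowStride);
  AImage_getPlaneRowStride(image, 1, &uvRowStride);
  AImage_getPlanePixelStride(image, 1, &uvPixelStride);

  int32_t w = std::min(srcW, buf->width);
  int32_t h = std::min(srcH, buf->height);
  uint32_t* out = static_cast<uint32_t*>(buf->bits);

  for (int32_t row = 0; row < h; row++) {
    const uint8_t* yRow = yPlane + row * yRowStride;
    const uint8_t* uRow = uPlane + (row / 2) * uvRowStride;
    const uint8_t* vRow = vPlane + (row / 2) * uvRowStride;
    uint32_t* dst = out + row * buf->stride;  // stride is in pixels for RGBA_8888
    for (int32_t col = 0; col < w; col++) {
      int y = yRow[col];
      int u = uRow[(col / 2) * uvPixelStride] - 128;
      int v = vRow[(col / 2) * uvPixelStride] - 128;
      // BT.601 YUV -> RGB
      int r = y + (int)(1.402f * v);
      int g = y - (int)(0.344f * u + 0.714f * v);
      int b = y + (int)(1.772f * u);
      r = std::max(0, std::min(255, r));
      g = std::max(0, std::min(255, g));
      b = std::max(0, std::min(255, b));
      // RGBA_8888 byte order is R, G, B, A (little-endian word below)
      dst[col] = 0xFF000000u | (b << 16) | (g << 8) | r;
    }
  }
}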

Android TextureView Example

The layout file contains a TextureView for the camera preview:

<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:layout_centerHorizontal="true"
    android:layout_centerVertical="true">

    <TextureView
        android:id="@+id/texturePreview"
        android:layout_gravity="center"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"/>

</FrameLayout>
In the activity, look up the TextureView and register a SurfaceTextureListener. Once the surface is available, the native camera is created and the preview surface is handed over to C++:

private void createTextureView() {
  textureView_ = (TextureView) findViewById(R.id.texturePreview);
  textureView_.setSurfaceTextureListener(this);
  if (textureView_.isAvailable()) {
    onSurfaceTextureAvailable(textureView_.getSurfaceTexture(),
                              textureView_.getWidth(), textureView_.getHeight());
  }
}

public void onSurfaceTextureAvailable(SurfaceTexture surface,
                                      int width, int height) {
  createNativeCamera();

  resizeTextureView(width, height);
  surface.setDefaultBufferSize(cameraPreviewSize_.getWidth(),
                               cameraPreviewSize_.getHeight());
  surface_ = new Surface(surface);
  onPreviewSurfaceCreated(ndkCamera_, surface_);
  scheduleTask();
}
extern "C" JNIEXPORT void JNICALL
Java_com_sample_textureview_ViewActivity_onPreviewSurfaceCreated(
JNIEnv *env, jobject instance, jlong ndkCameraObj, jobject surface) {
ASSERT(ndkCameraObj && (jlong)pEngineObj == ndkCameraObj,
"NativeObject should not be null Pointer");
CameraAppEngine *pApp = reinterpret_cast<CameraAppEngine *>(ndkCameraObj);
pApp->CreateCameraSession(surface);
pApp->StartPreview(true);
}

camera_->CreateSession(ANativeWindow_fromSurface(env_, surface));

void NDKCamera::CreateSession(ANativeWindow* previewWindow) {
  requests_[PREVIEW_REQUEST_IDX].outputNativeWindow_ = previewWindow;
  requests_[PREVIEW_REQUEST_IDX].template_ = TEMPLATE_PREVIEW;

  CALL_CONTAINER(create(&outputContainer_));
  for (auto& req : requests_) {
    if (!req.outputNativeWindow_) continue;

    ANativeWindow_acquire(req.outputNativeWindow_);
    CALL_OUTPUT(create(req.outputNativeWindow_, &req.sessionOutput_));
    CALL_CONTAINER(add(outputContainer_, req.sessionOutput_));
    CALL_TARGET(create(req.outputNativeWindow_, &req.target_));
    CALL_DEV(createCaptureRequest(cameras_[activeCameraId_].device_,
                                  req.template_, &req.request_));
    CALL_REQUEST(addTarget(req.request_, req.target_));
  }

  captureSessionState_ = CaptureSessionState::READY;
  CALL_DEV(createCaptureSession(cameras_[activeCameraId_].device_,
                                outputContainer_, GetSessionListener(),
                                &captureSession_));
}
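The CALL_* helpers are error-checking macros from the NDK camera sample's utility header. Roughly, each one prefixes the matching NDK type name and asserts that the call returned ACAMERA_OK, along these lines (simplified):

// Simplified sketch of the CALL_* helpers used throughout the sample.
#define CALL_CAMERA(func)                                              \
  {                                                                    \
    camera_status_t status = func;                                     \
    ASSERT(status == ACAMERA_OK, "%s failed with %d", #func, status);  \
  }
#define CALL_MGR(func)       CALL_CAMERA(ACameraManager_##func)
#define CALL_DEV(func)       CALL_CAMERA(ACameraDevice_##func)
#define CALL_SESSION(func)   CALL_CAMERA(ACameraCaptureSession_##func)
#define CALL_CONTAINER(func) CALL_CAMERA(ACaptureSessionOutputContainer_##func)
#define CALL_OUTPUT(func)    CALL_CAMERA(ACaptureSessionOutput_##func)
#define CALL_TARGET(func)    CALL_CAMERA(ACameraOutputTarget_##func)
#define CALL_REQUEST(func)   CALL_CAMERA(ACaptureRequest_##func)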

Combining TextureView and ImageReader

In Java, combining a TextureView surface with an ImageReader surface is straightforward: both are passed to createCaptureSession():

mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
    new CameraCaptureSession.StateCallback() {

      @Override
      public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
        // The camera is already closed
        if (null == mCameraDevice) {
          return;
        }

        mCaptureSession = cameraCaptureSession;
        startPreview();
      }

      @Override
      public void onConfigureFailed(
          @NonNull CameraCaptureSession cameraCaptureSession) {
        showToast("Failed");
      }
    }, null
);
A first attempt at the C++ equivalent adds both native windows as targets of a single preview request:

void NDKCamera::CreateSession(ANativeWindow* textureViewWindow, ANativeWindow* imgReaderWindow) {
  auto& req = requests_[PREVIEW_REQUEST_IDX];
  req.outputNativeWindow_ = textureViewWindow;
  req.yuvWindow = imgReaderWindow;
  req.template_ = TEMPLATE_PREVIEW;

  ACaptureSessionOutputContainer_create(&outputContainer_);
  CALL_DEV(createCaptureRequest(cameras_[activeCameraId_].device_,
                                req.template_, &req.request_));

  // Add the texture view surface to the container
  ANativeWindow_acquire(req.outputNativeWindow_);
  CALL_OUTPUT(create(req.outputNativeWindow_, &req.sessionOutput_));
  CALL_CONTAINER(add(outputContainer_, req.sessionOutput_));
  CALL_TARGET(create(req.outputNativeWindow_, &req.target_));
  CALL_REQUEST(addTarget(req.request_, req.target_));

  // Add the image reader surface to the container
  ANativeWindow_acquire(req.yuvWindow);
  CALL_OUTPUT(create(req.yuvWindow, &req.yuvOutput));
  CALL_CONTAINER(add(outputContainer_, req.yuvOutput));
  CALL_TARGET(create(req.yuvWindow, &req.yuvTarget));
  CALL_REQUEST(addTarget(req.request_, req.yuvTarget));

  captureSessionState_ = CaptureSessionState::READY;
  ACameraDevice_createCaptureSession(cameras_[activeCameraId_].device_,
                                     outputContainer_, GetSessionListener(),
                                     &captureSession_);
}

void NDKCamera::StartPreview(bool start) {
  if (start) {
    ACaptureRequest* requests[] = { requests_[PREVIEW_REQUEST_IDX].request_ };
    ACameraCaptureSession_setRepeatingRequest(captureSession_, nullptr, 1,
                                              requests, nullptr);
  } else if (!start && captureSessionState_ == CaptureSessionState::ACTIVE) {
    ACameraCaptureSession_stopRepeating(captureSession_);
  }
}

This approach fails at runtime, though; logcat keeps reporting camera device errors:
2021-12-14 08:42:20.316 24536-24556/com.sample.textureview D/ACameraDevice: Device error received, code 3, frame number 13, request ID 0, subseq ID 0
2021-12-14 08:42:21.319 24536-24556/com.sample.textureview D/ACameraDevice: Device error received, code 3, frame number 14, request ID 0, subseq ID 0
2021-12-14 08:42:22.321 24536-24584/com.sample.textureview D/ACameraDevice: Device error received, code 3, frame number 15, request ID 0, subseq ID 0
2021-12-14 08:42:23.323 24536-24584/com.sample.textureview D/ACameraDevice: Device error received, code 3, frame number 16, request ID 0, subseq ID 0
2021-12-14 08:42:24.325 24536-24556/com.sample.textureview D/ACameraDevice: Device error received, code 3, frame number 17, request ID 0, subseq ID 0
2021-12-14 08:42:25.328 24536-24584/com.sample.textureview D/ACameraDevice: Device error received, code 3, frame number 18, request ID 0, subseq ID 0
2021-12-14 08:42:26.330 24536-24584/com.sample.textureview D/ACameraDevice: Device error received, code 3, frame number 19, request ID 0, subseq ID 0
The workaround is to give the ImageReader surface its own capture request. Create an AImageReader for YUV_420_888 frames and pass its native window as a second argument:

yuvReader_ = new ImageReader(&compatibleCameraRes_, AIMAGE_FORMAT_YUV_420_888);
camera_->CreateSession(ANativeWindow_fromSurface(env_, surface), yuvReader_->GetNativeWindow());

void NDKCamera::CreateSession(ANativeWindow* previewWindow, ANativeWindow* yuvWindow) {
  // Create an output from each ANativeWindow and add it to the output container
  requests_[PREVIEW_REQUEST_IDX].outputNativeWindow_ = previewWindow;
  requests_[PREVIEW_REQUEST_IDX].template_ = TEMPLATE_PREVIEW;
  requests_[YUV_REQUEST_IDX].outputNativeWindow_ = yuvWindow;
  requests_[YUV_REQUEST_IDX].template_ = TEMPLATE_PREVIEW;

  CALL_CONTAINER(create(&outputContainer_));
  for (auto& req : requests_) {
    if (!req.outputNativeWindow_) continue;

    ANativeWindow_acquire(req.outputNativeWindow_);
    CALL_OUTPUT(create(req.outputNativeWindow_, &req.sessionOutput_));
    CALL_CONTAINER(add(outputContainer_, req.sessionOutput_));
    CALL_TARGET(create(req.outputNativeWindow_, &req.target_));
    CALL_DEV(createCaptureRequest(cameras_[activeCameraId_].device_,
                                  req.template_, &req.request_));
    CALL_REQUEST(addTarget(req.request_, req.target_));
  }

  // Create a capture session for the given requests
  captureSessionState_ = CaptureSessionState::READY;
  CALL_DEV(createCaptureSession(cameras_[activeCameraId_].device_,
                                outputContainer_, GetSessionListener(),
                                &captureSession_));
}

Both requests are then submitted together as a repeating burst:

void NDKCamera::StartPreview(bool start) {
  if (start) {
    ACaptureRequest* requests[] = { requests_[PREVIEW_REQUEST_IDX].request_,
                                    requests_[YUV_REQUEST_IDX].request_ };
    CALL_SESSION(setRepeatingRequest(captureSession_, nullptr, 2,
                                     requests, nullptr));
  } else if (!start && captureSessionState_ == CaptureSessionState::ACTIVE) {
    ACameraCaptureSession_stopRepeating(captureSession_);
  }
}
To decode barcodes, the app also captures still JPEG frames. This variant of CreateSession() takes a JPEG ImageReader window, uses TEMPLATE_STILL_CAPTURE for that request, and applies the sensor rotation to the JPEG output:

void NDKCamera::CreateSession(ANativeWindow* previewWindow,
                              ANativeWindow* jpgWindow, bool manualPreview,
                              int32_t imageRotation) {
  requests_[PREVIEW_REQUEST_IDX].outputNativeWindow_ = previewWindow;
  requests_[PREVIEW_REQUEST_IDX].template_ = TEMPLATE_PREVIEW;
  requests_[JPG_CAPTURE_REQUEST_IDX].outputNativeWindow_ = jpgWindow;
  requests_[JPG_CAPTURE_REQUEST_IDX].template_ = TEMPLATE_STILL_CAPTURE;

  CALL_CONTAINER(create(&outputContainer_));
  for (auto& req : requests_) {
    if (!req.outputNativeWindow_) continue;

    ANativeWindow_acquire(req.outputNativeWindow_);
    CALL_OUTPUT(create(req.outputNativeWindow_, &req.sessionOutput_));
    CALL_CONTAINER(add(outputContainer_, req.sessionOutput_));
    CALL_TARGET(create(req.outputNativeWindow_, &req.target_));
    CALL_DEV(createCaptureRequest(cameras_[activeCameraId_].device_,
                                  req.template_, &req.request_));
    CALL_REQUEST(addTarget(req.request_, req.target_));
  }

  // Create a capture session for the given preview request
  captureSessionState_ = CaptureSessionState::READY;
  CALL_DEV(createCaptureSession(cameras_[activeCameraId_].device_,
                                outputContainer_, GetSessionListener(),
                                &captureSession_));

  if (jpgWindow) {
    ACaptureRequest_setEntry_i32(requests_[JPG_CAPTURE_REQUEST_IDX].request_,
                                 ACAMERA_JPEG_ORIENTATION, 1, &imageRotation);
  }
}
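The imageRotation value is typically derived from the sensor orientation reported in the camera characteristics. Below is a minimal sketch of reading that value for a known camera ID; the member names (cameraMgr_, activeCameraId_) are assumptions and the sample's own GetCameraSensorOrientation() may differ:

// Hypothetical sketch: read ACAMERA_SENSOR_ORIENTATION for the active camera.
int32_t NDKCamera::GetSensorOrientation(void) {
  ACameraMetadata* metadata = nullptr;
  camera_status_t status = ACameraManager_getCameraCharacteristics(
      cameraMgr_, activeCameraId_.c_str(), &metadata);
  if (status != ACAMERA_OK || !metadata) return 0;

  ACameraMetadata_const_entry entry;
  int32_t orientation = 0;
  if (ACameraMetadata_getConstEntry(metadata, ACAMERA_SENSOR_ORIENTATION,
                                    &entry) == ACAMERA_OK) {
    orientation = entry.data.i32[0];  // 0, 90, 180, or 270 degrees
  }
  ACameraMetadata_free(metadata);
  return orientation;
}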
The layout now adds a TextView for showing the decoded result and an ImageButton for triggering a capture:

<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:layout_centerHorizontal="true"
    android:layout_centerVertical="true">

    <TextureView
        android:id="@+id/texturePreview"
        android:layout_gravity="center"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"/>

    <TextView
        android:id="@+id/textView"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_gravity="center_vertical"
        android:textSize="10pt"
        android:textColor="@android:color/white"/>

    <ImageButton
        android:id="@+id/takePhoto"
        android:layout_width="80dp"
        android:layout_height="60dp"
        android:layout_gravity="bottom|center"
        android:src="@drawable/camera_button"
        android:background="@android:color/transparent"
        android:adjustViewBounds="true"
        android:scaleType="fitCenter"
        android:layout_alignParentBottom="true"
        android:layout_centerHorizontal="true"
        android:layout_marginBottom="60dp"/>

</FrameLayout>
In onCreate(), the take-photo button is wired to the native scanPhoto() method:

public void onCreate(Bundle savedInstanceState) {
  super.onCreate(savedInstanceState);
  onWindowFocusChanged(true);
  setContentView(R.layout.activity_main);
  textView = findViewById(R.id.textView);
  _takePhoto = (ImageButton) findViewById(R.id.takePhoto);
  _takePhoto.setOnClickListener(new View.OnClickListener() {
    @Override
    public void onClick(View v) {
      scanPhoto(ndkCamera_);
    }
  });
  if (isCamera2Device()) {
    RequestCamera();
  } else {
    Log.e("CameraSample", "Found legacy camera device, this sample needs camera2 device");
  }
}
Captured frames arrive in the AImageReader callback, where they can be dispatched by format:

void ImageReader::ImageCallback(AImageReader *reader) {
  int32_t format;
  media_status_t status = AImageReader_getFormat(reader, &format);
  ASSERT(status == AMEDIA_OK, "Failed to get the media format");
  if (format == AIMAGE_FORMAT_JPEG) {
    AImage *image = nullptr;
    media_status_t status = AImageReader_acquireNextImage(reader, &image);
    ASSERT(status == AMEDIA_OK && image, "Image is not available");

    // TODO: decode the JPEG frame here (see the QR scanner section below)
  }
  else if (format == AIMAGE_FORMAT_YUV_420_888) {
    // Handle YUV frames here if needed
  }
}

Linking Third-party Shared Library in Android Project

In CMakeLists.txt, add the Dynamsoft Barcode Reader headers to the include path, declare the prebuilt shared library as an imported target, and link it together with the NDK camera and media libraries:

target_include_directories(camera_textureview PRIVATE ${COMMON_SOURCE_DIR} ./dbr)

add_library(DynamsoftBarcodeReaderAndroid
            SHARED
            IMPORTED)
set_target_properties( # Specifies the target library.
            DynamsoftBarcodeReaderAndroid

            # Specifies the parameter you want to define.
            PROPERTIES IMPORTED_LOCATION

            # Provides the path to the library you want to import.
            ${CMAKE_CURRENT_SOURCE_DIR}/dbr/libDynamsoftBarcodeReaderAndroid.so)

target_link_libraries(camera_textureview dl android log m camera2ndk mediandk DynamsoftBarcodeReaderAndroid)
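The decoding code below assumes a reader handle named barcode_reader has already been created. A minimal sketch of creating and releasing it is shown here; license initialization is omitted because the exact DBR_InitLicense signature varies across SDK versions:

#include "DynamsoftBarcodeReader.h"

// barcode_reader is the handle used by the decoding code below.
static void* barcode_reader = nullptr;

void InitBarcodeReader() {
  // Create a Dynamsoft Barcode Reader instance. A valid license must also be
  // set; the license API differs between SDK versions, so it is not shown here.
  barcode_reader = DBR_CreateInstance();
}

void DestroyBarcodeReader() {
  if (barcode_reader) {
    DBR_DestroyInstance(barcode_reader);
    barcode_reader = nullptr;
  }
}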

Making a QR Scanner

A JPEG AImage has a single plane that holds the complete encoded file, which can be passed straight to the Dynamsoft Barcode Reader:

void ImageReader::DecodeImage(AImage* image) {
  int planeCount;
  media_status_t status = AImage_getNumberOfPlanes(image, &planeCount);
  ASSERT(status == AMEDIA_OK && planeCount == 1,
         "Error: getNumberOfPlanes() planeCount = %d", planeCount);

  // The single plane contains the JPEG file bytes
  uint8_t *data = nullptr;
  int len = 0;
  AImage_getPlaneData(image, 0, &data, &len);

  DBR_DecodeFileInMemory(barcode_reader, data, len, "");
  TextResultArray *handler = NULL;
  DBR_GetAllTextResults(barcode_reader, &handler);
  TextResult **results = handler->results;
  int count = handler->resultsCount;
  std::string out = "No QR Detected";
  if (count > 0)
  {
    out = "";
    for (int index = 0; index < count; index++)
    {
      out += "Index: " + std::to_string(index) + "\n";
      out += "Barcode format: " + std::string(results[index]->barcodeFormatString) + "\n";
      out += "Barcode value: " + std::string(results[index]->barcodeText) + "\n";
      out += "\n";
    }
  }
  // Free the result array regardless of whether anything was detected
  DBR_FreeTextResults(&handler);

  if (callback_) {
    LOGI("QR detection %s ", out.c_str());
    callback_(callbackCtx_, out.c_str());
  }
  AImage_delete(image);
}

Decoding runs on a detached worker thread so that the image callback returns immediately:

void ImageReader::ImageCallback(AImageReader *reader) {
  int32_t format;
  media_status_t status = AImageReader_getFormat(reader, &format);
  ASSERT(status == AMEDIA_OK, "Failed to get the media format");
  if (format == AIMAGE_FORMAT_JPEG) {
    AImage *image = nullptr;
    media_status_t status = AImageReader_acquireNextImage(reader, &image);
    ASSERT(status == AMEDIA_OK && image, "Image is not available");

    // Decode on a background thread; DecodeImage() deletes the image when done
    std::thread decodeQRHandler(&ImageReader::DecodeImage, this, image);
    decodeQRHandler.detach();
  }
}
The result is delivered back to Java through JNI. Because DecodeImage() runs on a worker thread, that thread has to be attached to the JVM first:

void CameraAppEngine::OnQRDetected(const char *result) {
  JNIEnv* env;

  jvm->AttachCurrentThread(&env, nullptr);
  jmethodID methodID = env->GetMethodID(globalClass, "onQRDetected", "(Ljava/lang/String;)V");
  jstring javaName = env->NewStringUTF(result);

  env->CallVoidMethod(javaInstance_, methodID, javaName);
  jvm->DetachCurrentThread();
}

When the camera session is created, a JPEG ImageReader is set up and OnQRDetected() is registered as its result callback:

void CameraAppEngine::CreateCameraSession(jobject surface) {
  surface_ = env_->NewGlobalRef(surface);
  jpgReader_ = new ImageReader(&compatibleCameraRes_, AIMAGE_FORMAT_JPEG);
  jpgReader_->SetPresentRotation(GetCameraSensorOrientation(ACAMERA_LENS_FACING_BACK));
  jpgReader_->RegisterCallback(this, [this](void* ctx, const char* str) -> void {
    reinterpret_cast<CameraAppEngine*>(ctx)->OnQRDetected(str);
  });
  camera_->CreateSession(ANativeWindow_fromSurface(env_, surface),
                         jpgReader_->GetNativeWindow(), false,
                         GetCameraSensorOrientation(ACAMERA_LENS_FACING_BACK));
}
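RegisterCallback() just stores the context pointer and function that DecodeImage() later invokes. A sketch of what the ImageReader side might look like; the typedef and member declarations here are assumptions, only callback_ and callbackCtx_ match names used above:

#include <functional>

// Hypothetical sketch of the callback plumbing inside ImageReader.
using QRResultCallback = std::function<void(void* ctx, const char* result)>;

class ImageReader {
 public:
  void RegisterCallback(void* ctx, QRResultCallback cb) {
    callbackCtx_ = ctx;
    callback_ = std::move(cb);
  }
  // ...
 private:
  void* callbackCtx_ = nullptr;
  QRResultCallback callback_;
};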
On the Java side, a Timer task triggers scanPhoto(), and onQRDetected() updates the TextView on the UI thread before scheduling the next scan, so the app keeps decoding continuously:

Timer timer = new Timer();

public void scheduleTask() {
  timer.schedule(new TimerTask() {
    @Override
    public void run() {
      if (ndkCamera_ != 0 && surface_ != null) {
        scanPhoto(ndkCamera_);
      }
    }
  }, 0);
}

public void onQRDetected(String result) {
  final String content = result;
  runOnUiThread(new Runnable() {
    @Override
    public void run() {
      textView.setText(content);
      scheduleTask();
    }
  });
}

Is It Worth Migrating Camera2 Code from Java to C++?

Related Article

References

Source Code
