summaryrefslogtreecommitdiffstats
path: root/src/org/codeaurora
diff options
context:
space:
mode:
authorJay Wang <jaywang@codeaurora.org>2016-03-30 17:43:18 -0700
committerSteve Kondik <steve@cyngn.com>2016-08-21 18:45:51 -0700
commit43bb58af79bf830401041d61a115fa18e70d5c35 (patch)
tree9e6b21c83ee7e673cc8bb57a8be3b4c450a266b9 /src/org/codeaurora
parentb3b964bcd3946f4fb41b5c34cdd890dc3a25ab1a (diff)
downloadandroid_packages_apps_Snap-43bb58af79bf830401041d61a115fa18e70d5c35.zip
android_packages_apps_Snap-43bb58af79bf830401041d61a115fa18e70d5c35.tar.gz
android_packages_apps_Snap-43bb58af79bf830401041d61a115fa18e70d5c35.tar.bz2
SnapdragonCamera: refactor clearsight code and add persist flags
Move out core ClearSight logic to ClearSightImageProcessor.java Move core ClearSight files to new package: org/codeaurora/snapcam/filter Add persist flags: timestamp difference threshold: persist.camera.cs.threshold burst shot count: persist.camera.cs.burstcount dump source frames: persist.camera.cs.dumpframes CRs-Fixed: 993611 Change-Id: Ic8953a72e8697e494c3ae342bebc70f60540474b
Diffstat (limited to 'src/org/codeaurora')
-rw-r--r--src/org/codeaurora/snapcam/filter/ClearSightImageProcessor.java663
-rw-r--r--src/org/codeaurora/snapcam/filter/ClearSightNativeEngine.java336
2 files changed, 999 insertions, 0 deletions
diff --git a/src/org/codeaurora/snapcam/filter/ClearSightImageProcessor.java b/src/org/codeaurora/snapcam/filter/ClearSightImageProcessor.java
new file mode 100644
index 0000000..45856c7
--- /dev/null
+++ b/src/org/codeaurora/snapcam/filter/ClearSightImageProcessor.java
@@ -0,0 +1,663 @@
+/*
+ * Copyright (c) 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.codeaurora.snapcam.filter;
+
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.List;
+
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCaptureSession.CaptureCallback;
+import android.hardware.camera2.CameraDevice;
+import android.hardware.camera2.CaptureFailure;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.InputConfiguration;
+import android.media.Image;
+import android.media.Image.Plane;
+import android.media.ImageReader;
+import android.media.ImageReader.OnImageAvailableListener;
+import android.media.ImageWriter;
+import android.os.Handler;
+import android.os.HandlerThread;
+import android.os.Looper;
+import android.os.Message;
+import android.os.SystemProperties;
+import android.util.Log;
+import android.view.Surface;
+
+import com.android.camera.MediaSaveService;
+import com.android.camera.PhotoModule.NamedImages;
+import com.android.camera.PhotoModule.NamedImages.NamedEntity;
+import org.codeaurora.snapcam.filter.ClearSightNativeEngine.ClearsightImage;
+
+public class ClearSightImageProcessor {
    private static final String TAG = "ClearSightImageProcessor";
    // Runtime tuning knobs read once in the constructor (see SystemProperties usage below).
    private static final String PERSIST_TIMESTAMP_LIMIT_KEY = "persist.camera.cs.threshold";
    private static final String PERSIST_BURST_COUNT_KEY = "persist.camera.cs.burstcount";
    private static final String PERSIST_DUMP_FRAMES_KEY = "persist.camera.cs.dumpframes";

    // Default max bayer/mono timestamp skew (ms) for a frame pair to count as a match.
    private static final long DEFAULT_TIMESTAMP_THRESHOLD_MS = 10;
    // Default number of frames captured per shutter press.
    private static final int DEFAULT_IMAGES_TO_BURST = 5;

    // Handler message codes shared by ImageProcessHandler and ImageReprocessHandler.
    private static final int MSG_START_CAPTURE = 0;
    private static final int MSG_NEW_IMG = 1;
    private static final int MSG_NEW_RESULT = 2;

    // Camera indices used throughout as msg.arg1 and as array subscripts.
    private static final int CAM_TYPE_BAYER = 0;
    private static final int CAM_TYPE_MONO = 1;
    private static final int NUM_CAM = 2;

    // Vendor tag carrying dual-camera OTP calibration; currently only referenced
    // from a commented-out TODO in sendReprocessRequests().
    private static CaptureResult.Key<Byte> OTP_CALIB_BLOB =
            new CaptureResult.Key<>(
                    "org.codeaurora.qcamera3.dualcam_calib_meta_data.dualcam_calib_meta_data_blob",
                    Byte.class);

    private NamedImages mNamedImages;
    // Indexed by CAM_TYPE_*: first-pass capture readers, reprocess-output readers,
    // and writers feeding the reprocess input surface.
    private ImageReader[] mImageReader = new ImageReader[NUM_CAM];
    private ImageReader[] mReprocessImageReader = new ImageReader[NUM_CAM];
    private ImageWriter[] mImageWriter = new ImageWriter[NUM_CAM];

    private ImageProcessHandler mImageProcessHandler;
    private ImageReprocessHandler mImageReprocessHandler;
    private HandlerThread mImageProcessThread;
    private HandlerThread mImageReprocessThread;
    private Callback mCallback;

    // Values resolved from the persist.* properties in the constructor.
    private long mTimestampThresholdNs;
    private int mNumBurstCount;
    private boolean mDumpImages;

    private static ClearSightImageProcessor mInstance;
+
+ private ClearSightImageProcessor() {
+ mNamedImages = new NamedImages();
+ long threshMs = SystemProperties.getLong(PERSIST_TIMESTAMP_LIMIT_KEY, DEFAULT_TIMESTAMP_THRESHOLD_MS);
+ mTimestampThresholdNs = threshMs * 1000000;
+ Log.d(TAG, "mTimestampThresholdNs: " + mTimestampThresholdNs);
+
+ mNumBurstCount = SystemProperties.getInt(PERSIST_BURST_COUNT_KEY, DEFAULT_IMAGES_TO_BURST);
+ Log.d(TAG, "mNumBurstCount: " + mNumBurstCount);
+
+ mDumpImages = SystemProperties.getBoolean(PERSIST_DUMP_FRAMES_KEY, false);
+ Log.d(TAG, "mDumpImages: " + mDumpImages);
+ }
+
+ public static void createInstance() {
+ if(mInstance == null) {
+ mInstance = new ClearSightImageProcessor();
+ ClearSightNativeEngine.createInstance();
+ }
+ }
+
+ public static ClearSightImageProcessor getInstance() {
+ if(mInstance == null) {
+ createInstance();
+ }
+ return mInstance;
+ }
+
    /**
     * Starts the two worker threads and creates one capture reader and one
     * reprocess reader per camera (bayer + mono) at the given picture size.
     * Must be called before capture(); paired with close().
     */
    public void init(int width, int height) {
        mImageProcessThread = new HandlerThread("CameraImageProcess");
        mImageProcessThread.start();
        mImageReprocessThread = new HandlerThread("CameraImageReprocess");
        mImageReprocessThread.start();

        mImageProcessHandler = new ImageProcessHandler(mImageProcessThread.getLooper());
        mImageReprocessHandler = new ImageReprocessHandler(mImageReprocessThread.getLooper());

        mImageReader[CAM_TYPE_BAYER] = createImageReader(CAM_TYPE_BAYER, width, height);
        mImageReader[CAM_TYPE_MONO] = createImageReader(CAM_TYPE_MONO, width, height);
        mReprocessImageReader[CAM_TYPE_BAYER] = createReprocImageReader(CAM_TYPE_BAYER, width, height);
        mReprocessImageReader[CAM_TYPE_MONO] = createReprocImageReader(CAM_TYPE_MONO, width, height);
    }
+
+ public void close() {
+ for(int i=0; i<mImageReader.length; i++) {
+ if (null != mImageReader[i]) {
+ mImageReader[i].close();
+ mImageReader[i] = null;
+ }
+ if (null != mReprocessImageReader[i]) {
+ mReprocessImageReader[i].close();
+ mReprocessImageReader[i] = null;
+ }
+ if (null != mImageWriter[i]) {
+ mImageWriter[i].close();
+ mImageWriter[i] = null;
+ }
+ }
+
+ if(mImageProcessThread != null) {
+ mImageProcessThread.quitSafely();
+
+ try {
+ mImageProcessThread.join();
+ mImageProcessThread = null;
+ mImageProcessHandler = null;
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+
+ if(mImageReprocessThread != null) {
+ mImageReprocessThread.quitSafely();
+
+ try {
+ mImageReprocessThread.join();
+ mImageReprocessThread = null;
+ mImageReprocessHandler = null;
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
    /** Registers the client that receives results and supplies camera facilities. */
    public void setCallback(Callback callback) {
        mCallback = callback;
    }
+
    /**
     * Adds this module's capture and reprocess surfaces to the caller-supplied
     * list, then creates a reprocessable capture session whose input matches
     * the capture reader (same size and format chosen in init()).
     *
     * @param bayer true for the bayer camera, false for mono
     * @throws CameraAccessException if the camera device is no longer usable
     */
    public void createCaptureSession(boolean bayer, CameraDevice device, List<Surface> surfaces,
            CameraCaptureSession.StateCallback captureSessionCallback) throws CameraAccessException {

        Log.d(TAG, "createCaptureSession: " + bayer);

        int cam = bayer?CAM_TYPE_BAYER:CAM_TYPE_MONO;
        surfaces.add(mImageReader[cam].getSurface());
        surfaces.add(mReprocessImageReader[cam].getSurface());
        // Here, we create a CameraCaptureSession for camera preview.
        device.createReprocessableCaptureSession(
                new InputConfiguration(mImageReader[cam].getWidth(),
                        mImageReader[cam].getHeight(), mImageReader[cam].getImageFormat()),
                surfaces, captureSessionCallback, null);
    }
+
+ public void onCaptureSessionConfigured(boolean bayer, CameraCaptureSession session) {
+ Log.d(TAG, "onCaptureSessionConfigured: " + bayer);
+
+ mImageWriter[bayer?CAM_TYPE_BAYER:CAM_TYPE_MONO] =
+ ImageWriter.newInstance(session.getInputSurface(), mNumBurstCount);
+ }
+
    /**
     * Builds a ZSL-template capture request for the device; the caller adds
     * targets and submits it via capture().
     */
    public CaptureRequest.Builder createCaptureRequest(CameraDevice device) throws CameraAccessException {
        Log.d(TAG, "createCaptureRequest");

        CaptureRequest.Builder builder = device.createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
        // Orientation handling is not wired up yet:
        // int rotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
        // captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
        return builder;
    }
+
    /**
     * Submits a burst of mNumBurstCount requests for one camera. Results and
     * failures are forwarded to mImageProcessHandler as MSG_NEW_RESULT with
     * arg1 = camera id and arg2 = 0 (success) or 1 (failure); MSG_START_CAPTURE
     * announces the expected frame count before the burst starts.
     */
    public void capture(boolean bayer, CameraCaptureSession session, CaptureRequest.Builder requestBuilder,
            Handler captureCallbackHandler) throws CameraAccessException {
        Log.d(TAG, "capture: " + bayer);

        final int cam = bayer?CAM_TYPE_BAYER:CAM_TYPE_MONO;

        CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
            @Override
            public void onCaptureCompleted(CameraCaptureSession session,
                    CaptureRequest request,
                    TotalCaptureResult result) {
                Log.d(TAG, "captureStillPicture onCaptureCompleted: " + cam);
                mImageProcessHandler.obtainMessage(MSG_NEW_RESULT,
                        cam, 0, result).sendToTarget();
            }

            @Override
            public void onCaptureFailed(CameraCaptureSession session,
                    CaptureRequest request,
                    CaptureFailure result) {
                Log.d(TAG, "captureStillPicture onCaptureFailed: " + cam);
                mImageProcessHandler.obtainMessage(MSG_NEW_RESULT,
                        cam, 1, result).sendToTarget();
            }

            @Override
            public void onCaptureSequenceCompleted(CameraCaptureSession session, int
                    sequenceId, long frameNumber) {
                Log.d(TAG, "captureStillPicture onCaptureSequenceCompleted: " + cam);
            }
        };

        List<CaptureRequest> burstList = new ArrayList<CaptureRequest>();
        requestBuilder.addTarget(mImageReader[cam].getSurface());
        for (int i = 0; i < mNumBurstCount; i++) {
            // NOTE(review): a fresh tag per request presumably makes each burst
            // request distinct to the framework — TODO confirm intent.
            requestBuilder.setTag(new Object());
            CaptureRequest request = requestBuilder.build();
            burstList.add(request);
        }

        mImageProcessHandler.obtainMessage(MSG_START_CAPTURE, cam, burstList.size()).sendToTarget();
        session.captureBurst(burstList, captureCallback, captureCallbackHandler);
    }
+
    /**
     * Creates the first-pass YUV capture reader for one camera; each arriving
     * image is posted to mImageProcessHandler as MSG_NEW_IMG (arg1 = cam).
     * Listener runs on the main looper (null handler).
     */
    private ImageReader createImageReader(final int cam, int width, int height) {
        ImageReader reader = ImageReader.newInstance(width, height,
                ImageFormat.YUV_420_888, mNumBurstCount);
        reader.setOnImageAvailableListener(new OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Log.d(TAG, "image available for cam: " + cam);
                mImageProcessHandler.obtainMessage(
                        MSG_NEW_IMG, cam, 0, reader.acquireNextImage()).sendToTarget();
            }
        }, null);

        return reader;
    }
+
    /**
     * Creates the reader that receives reprocessed frames for one camera;
     * each image is posted to mImageReprocessHandler as MSG_NEW_IMG (arg1 = cam).
     */
    private ImageReader createReprocImageReader(final int cam, int width, int height) {
        ImageReader reader = ImageReader.newInstance(width, height,
                ImageFormat.YUV_420_888, mNumBurstCount);
        reader.setOnImageAvailableListener(new OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Log.d(TAG, "reprocessed image available for cam: " + cam);
                mImageReprocessHandler.obtainMessage(
                        MSG_NEW_IMG, cam, 0, reader.acquireNextImage()).sendToTarget();
            }
        }, null);

        return reader;
    }
+
    /** Client contract for receiving ClearSight results and supplying camera facilities. */
    public interface Callback {
        /** Delivered when native processing produced a fused image. */
        public void onClearSightSuccess(ClearsightImage csImage, YuvImage bayer, YuvImage mono);
        /** Delivered when pairing or processing failed; arguments may be null. */
        public void onClearSightFailure(YuvImage bayer, YuvImage mono);
        /** Returns the reprocessable session to submit reprocess requests on. */
        public CameraCaptureSession onReprocess(boolean bayer);
        /** Supplies the service used to persist debug frames. */
        public MediaSaveService getMediaSaveService();
    }
+
    /** Pairs a captured Image with the TotalCaptureResult needed to reprocess it. */
    private static class ReprocessableImage {
        final Image mImage;
        final TotalCaptureResult mCaptureResult;

        ReprocessableImage(Image image, TotalCaptureResult result) {
            mImage = image;
            mCaptureResult = result;
        }
    }
+
    /**
     * Worker-thread handler for the first capture stage. Joins each incoming
     * Image with its TotalCaptureResult per camera, drops bayer/mono pairs
     * whose timestamps differ by more than mTimestampThresholdNs, and starts
     * reprocessing once all burst frames are accounted for. All state below is
     * confined to this handler's thread — no locking needed.
     */
    private class ImageProcessHandler extends Handler {
        // Matched image+result pairs awaiting bayer/mono timestamp pairing, oldest first.
        private ArrayDeque<ReprocessableImage> mBayerFrames = new ArrayDeque<ReprocessableImage>(
                mNumBurstCount);
        private ArrayDeque<ReprocessableImage> mMonoFrames = new ArrayDeque<ReprocessableImage>(
                mNumBurstCount);
        // Raw arrivals not yet joined into ReprocessableImage pairs.
        private ArrayDeque<TotalCaptureResult> mBayerCaptureResults = new ArrayDeque<TotalCaptureResult>(
                mNumBurstCount);
        private ArrayDeque<TotalCaptureResult> mMonoCaptureResults = new ArrayDeque<TotalCaptureResult>(
                mNumBurstCount);
        private ArrayDeque<Image> mBayerImages = new ArrayDeque<Image>(
                mNumBurstCount);
        private ArrayDeque<Image> mMonoImages = new ArrayDeque<Image>(
                mNumBurstCount);
        // Outstanding frame count per camera; reprocess starts when both reach zero.
        private int[] mNumImagesToProcess = new int[NUM_CAM];

        public ImageProcessHandler(Looper looper) {
            super(looper);
        }

        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
            case MSG_START_CAPTURE:
                // arg1 = camera id, arg2 = frames expected in this burst.
                mNumImagesToProcess[msg.arg1] = msg.arg2;
                break;
            case MSG_NEW_IMG:
            case MSG_NEW_RESULT:
                processNewEvent(msg);
                break;
            }
        }

        /**
         * Routes a new image or capture result onto the per-camera queues,
         * pairs image+result heads when both are available, and triggers
         * reprocessing when all expected frames have arrived or failed.
         */
        private void processNewEvent(Message msg) {
            ArrayDeque<Image> imageQueue;
            ArrayDeque<TotalCaptureResult> resultQueue;
            ArrayDeque<ReprocessableImage> reprocQueue;
            // push image onto queue
            if (msg.arg1 == CAM_TYPE_BAYER) {
                imageQueue = mBayerImages;
                resultQueue = mBayerCaptureResults;
                reprocQueue = mBayerFrames;
            } else {
                imageQueue = mMonoImages;
                resultQueue = mMonoCaptureResults;
                reprocQueue = mMonoFrames;
            }

            if(msg.what == MSG_NEW_IMG) {
                Log.d(TAG, "processNewEvent - newImg: " + msg.arg1);
                Image image = (Image) msg.obj;
                imageQueue.add(image);
            } else if(msg.arg2 == 1) {
                // arg2 == 1 marks a failed capture: no image/result will follow.
                Log.d(TAG, "processNewEvent - new failed result: " + msg.arg1);
                mNumImagesToProcess[msg.arg1]--;
            } else {
                Log.d(TAG, "processNewEvent - newResult: " + msg.arg1);
                TotalCaptureResult result = (TotalCaptureResult) msg.obj;
                resultQueue.add(result);
            }

            Log.d(TAG, "processNewEvent - cam: " + msg.arg1 + " num imgs: "
                    + imageQueue.size() + " num results: " + resultQueue.size());

            if (!imageQueue.isEmpty() && !resultQueue.isEmpty()) {
                // A complete image+result pair exists for this camera.
                Image headImage = imageQueue.poll();
                TotalCaptureResult headResult = resultQueue.poll();
                reprocQueue.add(new ReprocessableImage(headImage, headResult));
                mNumImagesToProcess[msg.arg1]--;
                checkForValidFramePair();
            }

            Log.d(TAG, "processNewEvent - imagestoprocess[bayer] " + mNumImagesToProcess[CAM_TYPE_BAYER] +
                    " imagestoprocess[mono]: " + mNumImagesToProcess[CAM_TYPE_MONO]);

            if (mNumImagesToProcess[CAM_TYPE_BAYER] == 0
                    && mNumImagesToProcess[CAM_TYPE_MONO] == 0) {
                processReprocess();
            }
        }

        /**
         * Compares the oldest bayer and mono frames; if their timestamps differ
         * by more than the threshold, both are discarded and their images closed.
         */
        private void checkForValidFramePair() {
            // if we have images from both
            // as we just added an image onto one of the queues
            // this condition is only true when both are not empty
            Log.d(TAG,
                    "checkForValidFramePair - num bayer frames: "
                            + mBayerFrames.size() + " num mono frames: "
                            + mMonoFrames.size());

            if (!mBayerFrames.isEmpty() && !mMonoFrames.isEmpty()) {
                // peek oldest pair of images
                ReprocessableImage bayer = mBayerFrames.peek();
                ReprocessableImage mono = mMonoFrames.peek();

                Log.d(TAG,
                        "checkForValidFramePair - bayer ts: "
                                + bayer.mImage.getTimestamp() + " mono ts: "
                                + mono.mImage.getTimestamp());
                Log.d(TAG,
                        "checkForValidFramePair - difference: "
                                + Math.abs(bayer.mImage.getTimestamp()
                                        - mono.mImage.getTimestamp()));
                // if timestamps are within threshold, keep frames
                if (Math.abs(bayer.mImage.getTimestamp()
                        - mono.mImage.getTimestamp()) > mTimestampThresholdNs) {
                    Log.d(TAG, "checkForValidFramePair - toss pair");
                    // no match, toss
                    bayer = mBayerFrames.poll();
                    mono = mMonoFrames.poll();
                    bayer.mImage.close();
                    mono.mImage.close();
                }
            }
        }

        /** Closes and clears all queued bayer frames. */
        private void releaseBayerFrames() {
            for (ReprocessableImage reprocImg : mBayerFrames) {
                reprocImg.mImage.close();
            }

            mBayerFrames.clear();
        }

        /** Closes and clears all queued mono frames. */
        private void releaseMonoFrames() {
            for (ReprocessableImage reprocImg : mMonoFrames) {
                reprocImg.mImage.close();
            }

            mMonoFrames.clear();
        }

        /**
         * End of the burst: either reports failure (mismatched/empty queues)
         * or submits both cameras' frames for reprocessing.
         */
        private void processReprocess() {
            if(mCallback != null) {
                if (mBayerFrames.size() != mMonoFrames.size()
                        || mBayerFrames.isEmpty()) {
                    Log.d(TAG, "processReprocess - frame size mismatch or empty");
                    releaseBayerFrames();
                    releaseMonoFrames();
                    mCallback.onClearSightFailure(null, null);
                    return;
                } else {
                    sendReprocessRequests(CAM_TYPE_BAYER);
                    sendReprocessRequests(CAM_TYPE_MONO);
                }
            } else {
                // No client to deliver to — just release everything.
                releaseBayerFrames();
                releaseMonoFrames();
            }
        }

        /**
         * Drains one camera's frame queue into reprocess requests: each frame's
         * Image is queued into the session's input writer and its capture
         * result seeds a reprocess request targeting the reprocess reader.
         * Failures are reported to mImageReprocessHandler with arg2 == 1.
         */
        private void sendReprocessRequests(final int camId) {
            CameraCaptureSession session = mCallback.onReprocess(camId == CAM_TYPE_BAYER);
            CameraDevice device = session.getDevice();

            try {
                ArrayDeque<ReprocessableImage> frameQueue;
                if (camId == CAM_TYPE_BAYER) {
                    frameQueue = mBayerFrames;
                } else {
                    frameQueue = mMonoFrames;
                }
                Log.d(TAG, "sendReprocessRequests - start cam: " + camId
                        + " num frames: " + frameQueue.size());

                ArrayList<CaptureRequest> reprocRequests = new ArrayList<CaptureRequest>(
                        frameQueue.size());
                while (!frameQueue.isEmpty()) {
                    ReprocessableImage reprocImg = frameQueue.poll();

                    CaptureRequest.Builder reprocRequest = device
                            .createReprocessCaptureRequest(reprocImg.mCaptureResult);
                    reprocRequest.addTarget(mReprocessImageReader[camId]
                            .getSurface());
                    reprocRequests.add(reprocRequest.build());

                    mImageWriter[camId].queueInputImage(reprocImg.mImage);
                }

                mImageReprocessHandler.obtainMessage(MSG_START_CAPTURE, camId,
                        reprocRequests.size()).sendToTarget();
                session.captureBurst(reprocRequests,
                        new CaptureCallback() {
                            @Override
                            public void onCaptureCompleted(
                                    CameraCaptureSession session,
                                    CaptureRequest request,
                                    TotalCaptureResult result) {
                                super.onCaptureCompleted(session, request, result);
                                Log.d(TAG, "reprocess - onCaptureCompleted: "
                                        + camId);
                                // TODO: parse OTP Calib data to be used in final CS
                                // result.get(OTP_CALIB_BLOB);
                            }

                            @Override
                            public void onCaptureFailed(
                                    CameraCaptureSession session,
                                    CaptureRequest request,
                                    CaptureFailure failure) {
                                super.onCaptureFailed(session, request, failure);
                                Log.d(TAG, "reprocess - onCaptureFailed: "
                                        + camId);
                                mImageReprocessHandler.obtainMessage(
                                        MSG_NEW_RESULT, camId, 1)
                                        .sendToTarget();
                            }
                        }, null);
            } catch (CameraAccessException e) {
                // NOTE(review): counts announced via MSG_START_CAPTURE are not
                // rolled back here, so the reprocess stage may stall — confirm.
                e.printStackTrace();
            }
        }
    };
+
    /**
     * Worker-thread handler for the reprocess stage. Feeds each reprocessed
     * image into the native engine (first frame per stream becomes the
     * reference, later ones are registered against it) and runs the final
     * ClearSight merge once both cameras' counts reach zero.
     */
    private class ImageReprocessHandler extends Handler {
        // Outstanding reprocessed frames per camera, set by MSG_START_CAPTURE.
        private int[] mNumImagesToProcess = new int[NUM_CAM];

        public ImageReprocessHandler(Looper looper) {
            super(looper);
        }

        @Override
        public void handleMessage(Message msg) {
            switch (msg.what) {
            case MSG_START_CAPTURE:
                // arg1 = camera id, arg2 = number of reprocess requests submitted.
                mNumImagesToProcess[msg.arg1] = msg.arg2;
                break;
            case MSG_NEW_IMG:
            case MSG_NEW_RESULT:
                processNewEvent(msg);
                break;
            }
        }

        /**
         * Consumes one reprocessed image (or a failure notification, arg2 == 1)
         * and triggers the final merge when both counters hit zero.
         */
        private void processNewEvent(Message msg) {
            boolean isBayer = (msg.arg1 == CAM_TYPE_BAYER);

            if(msg.what == MSG_NEW_IMG) {
                Image image = (Image) msg.obj;
                Log.d(TAG, "reprocess - processNewImg");

                if(mDumpImages) {
                    saveDebugImage(mCallback.getMediaSaveService(), image, true);
                }

                if (!ClearSightNativeEngine.getInstance()
                        .hasReferenceImage(isBayer)) {
                    // reference not yet set
                    ClearSightNativeEngine.getInstance().setReferenceImage(isBayer,
                            image);
                } else {
                    // if ref images set, register this image
                    if(ClearSightNativeEngine.getInstance().registerImage(
                            isBayer, image) == false) {
                        Log.w(TAG, "registerImage : terminal error with input image");
                    }
                }
                mNumImagesToProcess[msg.arg1]--;
            } else if (msg.arg2 == 1) {
                // capture failed
                mNumImagesToProcess[msg.arg1]--;
            }

            Log.d(TAG, "reprocess - processNewEvent, cam: " + msg.arg1
                    + " count: " + mNumImagesToProcess[msg.arg1]);

            if (mNumImagesToProcess[CAM_TYPE_BAYER] == 0
                    && mNumImagesToProcess[CAM_TYPE_MONO] == 0) {
                processClearSight();
            }
        }

        /**
         * Runs the native merge and delivers success (fused image + reference
         * frames) or failure (reference frames only) to the callback, then
         * resets the native engine for the next shot.
         */
        private void processClearSight() {
            Log.d(TAG, "reprocess - processClearSight, bayercount: "
                    + mNumImagesToProcess[CAM_TYPE_BAYER] + " mono count: "
                    + mNumImagesToProcess[CAM_TYPE_MONO]);

            if(mCallback != null) {
                ClearSightNativeEngine.ClearsightImage csImage = ClearSightNativeEngine
                        .getInstance().processImage();

                if(csImage != null) {
                    Log.d(TAG, "reprocess - processClearSight, roiRect: "
                            + csImage.getRoiRect().toString());
                    mCallback.onClearSightSuccess(csImage,
                            createYuvImage(ClearSightNativeEngine.getInstance().getReferenceImage(true)),
                            createYuvImage(ClearSightNativeEngine.getInstance().getReferenceImage(false)));
                } else {
                    mCallback.onClearSightFailure(
                            createYuvImage(ClearSightNativeEngine.getInstance().getReferenceImage(true)),
                            createYuvImage(ClearSightNativeEngine.getInstance().getReferenceImage(false)));
                }
            }
            ClearSightNativeEngine.getInstance().reset();
        }
    };
+
+ public void saveDebugImage(MediaSaveService service, byte[] data,
+ int width, int height, boolean isReproc) {
+ mNamedImages.nameNewImage(System.currentTimeMillis());
+ NamedEntity name = mNamedImages.getNextNameEntity();
+ String title = (name == null) ? null : name.title;
+ long date = (name == null) ? -1 : name.date;
+
+ if (isReproc) {
+ title += "_reproc";
+ }
+
+ service.addImage(data, title, date, null,
+ width, height, 0, null, null,
+ service.getContentResolver(), "jpeg");
+ }
+
+ public void saveDebugImage(MediaSaveService service, YuvImage image, boolean isReproc) {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ image.compressToJpeg(
+ new Rect(0, 0, image.getWidth(), image.getHeight()), 100, baos);
+
+ saveDebugImage(service, baos.toByteArray(), image.getWidth(), image.getHeight(),
+ isReproc);
+ }
+
    /** Converts the Image to NV21 and saves it via the YuvImage overload. */
    public void saveDebugImage(MediaSaveService service, Image image, boolean isReproc) {
        saveDebugImage(service, createYuvImage(image), isReproc);
    }
+
+ public YuvImage createYuvImage(Image image) {
+ if (image == null) {
+ Log.d(TAG, "createYuvImage - invalid param");
+ return null;
+ }
+ Plane[] planes = image.getPlanes();
+ ByteBuffer yBuffer = planes[0].getBuffer();
+ ByteBuffer vuBuffer = planes[2].getBuffer();
+ int sizeY = yBuffer.capacity();
+ int sizeVU = vuBuffer.capacity();
+ byte[] data = new byte[sizeY + sizeVU];
+ yBuffer.rewind();
+ yBuffer.get(data, 0, sizeY);
+ vuBuffer.rewind();
+ vuBuffer.get(data, sizeY, sizeVU);
+ int[] strides = new int[] { planes[0].getRowStride(),
+ planes[2].getRowStride() };
+
+ return new YuvImage(data, ImageFormat.NV21, image.getWidth(),
+ image.getHeight(), strides);
+ }
+}
diff --git a/src/org/codeaurora/snapcam/filter/ClearSightNativeEngine.java b/src/org/codeaurora/snapcam/filter/ClearSightNativeEngine.java
new file mode 100644
index 0000000..d6f0899
--- /dev/null
+++ b/src/org/codeaurora/snapcam/filter/ClearSightNativeEngine.java
@@ -0,0 +1,336 @@
+/*
+ * Copyright (c) 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.codeaurora.snapcam.filter;
+
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.media.Image;
+import android.media.Image.Plane;
+import android.util.Log;
+
+public class ClearSightNativeEngine {
    private static final String TAG = "ClearSightNativeEngine";
    // Load the native ClearSight library once per process; mLibLoaded records
    // whether the native entry points are usable.
    static {
        try {
            System.loadLibrary("jni_clearsight");
            mLibLoaded = true;
            Log.v(TAG, "successfully loaded clearsight lib");
        } catch (UnsatisfiedLinkError e) {
            Log.e(TAG, "failed to load clearsight lib");
            e.printStackTrace();
            mLibLoaded = false;
        }
    }

    // Size of the per-frame metadata array exchanged with native registration.
    private static final int METADATA_SIZE = 5;
    // Plane indices into Image.getPlanes(): luma and interleaved chroma.
    private static final int Y_PLANE = 0;
    private static final int VU_PLANE = 2;

    // dummy OTP calib data
    private static final String otp_calib = "Calibration OTP format version = 10301\n"
            + "Main Native Sensor Resolution width = 4224px\n"
            + "Main Native Sensor Resolution height = 3136px\n"
            + "Main Calibration Resolution width = 1280px\n"
            + "Main Calibration Resolution height = 950px\n"
            + "Main Focal length ratio = 1.004896\n"
            + "Aux Native Sensor Resolution width = 1600px\n"
            + "Aux Native Sensor Resolution height = 1200px\n"
            + "Aux Calibration Resolution width = 1280px\n"
            + "Aux Calibration Resolution height = 960px\n"
            + "Aux Focal length ratio = 1.000000\n"
            + "Relative Rotation matrix [0] through [8] = 1.000000,-0.003008,0.000251,0.003073,1.000189,0.003329,0.019673,-0.003329,1.000284\n"
            + "Relative Geometric surface parameters [0] through [31] = -0.307164,-0.879074,4.636152,0.297486,-0.157539,-6.889396,0.109467,-2.797022,-0.066306,-0.120142,0.196464,0.021974,2.905827,0.241197,0.048328,-5.116615,0.496533,-5.263813,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000\n"
            + "Relative Principal point X axis offset (ox) = 0.000000px\n"
            + "Relative Principal point Y axis offset (oy) = 0.000000px\n"
            + "Relative position flag = 1\n"
            + "Baseline distance = 20.000000mm\n"
            + "Main sensor mirror and flip setting = 3\n"
            + "Aux sensor mirror and flip setting = 3\n"
            + "Module orientation during calibration = 0\n"
            + "Rotation flag = 0\n"
            + "Main Normalized Focal length = 1000.0px\n"
            + "Aux Normalized Focal length = 1000.0px";

    private static boolean mLibLoaded;
    private static ClearSightNativeEngine mInstance;

    // Reference frames (frame #0 of each stream) that later frames register against.
    private Image mRefColorImage;
    private Image mRefMonoImage;
    // Registered source frames accumulated for the final merge; index 0 is the reference.
    private ArrayList<SourceImage> mSrcColor = new ArrayList<SourceImage>();
    private ArrayList<SourceImage> mSrcMono = new ArrayList<SourceImage>();

    // Singleton: use createInstance()/getInstance().
    private ClearSightNativeEngine() {
    }
+
+ public static void createInstance() {
+ if (mInstance == null) {
+ mInstance = new ClearSightNativeEngine();
+ }
+ }
+
+ public static ClearSightNativeEngine getInstance() {
+ createInstance();
+ return mInstance;
+ }
+
    /** True when libjni_clearsight loaded successfully in the static initializer. */
    public boolean isLibLoaded() {
        return mLibLoaded;
    }
+
    /** Drops all registered source frames and closes/clears both reference images. */
    public void reset() {
        mSrcColor.clear();
        mSrcMono.clear();
        setReferenceColorImage(null);
        setReferenceMonoImage(null);
    }
+
+ public void setReferenceImage(boolean color, Image image) {
+ if (color)
+ setReferenceColorImage(image);
+ else
+ setReferenceMonoImage(image);
+ }
+
    /**
     * Replaces the color reference frame, closing any previous one. A non-null
     * reference is also appended to mSrcColor as registered frame #0 with
     * zeroed metadata (the reference itself needs no alignment).
     */
    private void setReferenceColorImage(Image reference) {
        if (mRefColorImage != null) {
            mRefColorImage.close();
            mRefColorImage = null;
        }

        mRefColorImage = reference;

        if (mRefColorImage != null) {
            Log.e(TAG, "setRefColorImage");
            mSrcColor.add(new SourceImage(mRefColorImage.getPlanes()[Y_PLANE]
                    .getBuffer(), mRefColorImage.getPlanes()[VU_PLANE]
                    .getBuffer(), new int[] { 0, 0, 0, 0, 0 }));
        }
    }
+
    /**
     * Replaces the mono reference frame, closing any previous one. Mono frames
     * are Y-only, so the SourceImage carries a null chroma buffer.
     */
    private void setReferenceMonoImage(Image reference) {
        if (mRefMonoImage != null) {
            mRefMonoImage.close();
            mRefMonoImage = null;
        }

        mRefMonoImage = reference;

        if (mRefMonoImage != null) {
            Log.e(TAG, "setRefMonoImage");
            mSrcMono.add(new SourceImage(mRefMonoImage.getPlanes()[Y_PLANE]
                    .getBuffer(), null, new int[] { 0, 0, 0, 0, 0 }));
        }
    }
+
+ public boolean hasReferenceImage(boolean color) {
+ return !(color ? mSrcColor.isEmpty() : mSrcMono.isEmpty());
+ }
+
    /** Returns the stored reference Image for the stream, or null if unset. */
    public Image getReferenceImage(boolean color) {
        return color ? mRefColorImage : mRefMonoImage;
    }
+
+ public boolean registerImage(boolean color, Image image) {
+ List<SourceImage> sourceImages = color?mSrcColor:mSrcMono;
+ if (sourceImages.isEmpty()) {
+ Log.w(TAG, "reference image not yet set");
+ return false;
+ }
+
+ Image referenceImage = color?mRefColorImage:mRefMonoImage;
+ Plane[] planes = image.getPlanes();
+ ByteBuffer yBuf = planes[Y_PLANE].getBuffer();
+ ByteBuffer refY = referenceImage.getPlanes()[Y_PLANE].getBuffer();
+ ByteBuffer regY = ByteBuffer.allocateDirect(refY.capacity());
+ int yRowStride = planes[Y_PLANE].getRowStride();
+
+ ByteBuffer vuBuf = null;
+ ByteBuffer refVU = null;
+ ByteBuffer regVU = null;
+ int vuRowStride = 0;
+ if(color) {
+ vuBuf = planes[VU_PLANE].getBuffer();
+ refVU = referenceImage.getPlanes()[VU_PLANE].getBuffer();
+ regVU = ByteBuffer.allocateDirect(refVU.capacity());
+ vuRowStride = planes[VU_PLANE].getRowStride();
+ }
+
+ int[] metadata = new int[METADATA_SIZE];
+
+ boolean result = nativeClearSightRegisterImage(refY,
+ yBuf, vuBuf, image.getWidth(), image.getHeight(),
+ yRowStride, vuRowStride, regY, regVU, metadata);
+
+ if (result) {
+ sourceImages.add(new SourceImage(regY, regVU, metadata));
+ }
+
+ image.close();
+ return result;
+ }
+
    /**
     * Runs the final native ClearSight merge over all registered color/mono
     * frame pairs (index 0 of each list is the reference frame).
     *
     * @return the fused NV21 image with its ROI, or null when the per-stream
     *         counts mismatch or native processing fails
     */
    public ClearsightImage processImage() {
        // check data validity
        if (mSrcColor.size() != mSrcMono.size()) {
            // mis-match in num images
            Log.e(TAG, "processImage - numImages mismatch - bayer: "
                    + mSrcColor.size() + ", mono: " + mSrcMono.size());
            return null;
        }

        int numImages = mSrcColor.size();
        // Flatten the SourceImage lists into parallel arrays for the native call.
        ByteBuffer[] srcColorY = new ByteBuffer[numImages];
        ByteBuffer[] srcColorVU = new ByteBuffer[numImages];
        int[][] metadataColor = new int[numImages][];
        ByteBuffer[] srcMonoY = new ByteBuffer[numImages];
        int[][] metadataMono = new int[numImages][];

        Log.e(TAG, "processImage - numImages: " + numImages);

        for (int i = 0; i < numImages; i++) {
            SourceImage color = mSrcColor.get(i);
            SourceImage mono = mSrcMono.get(i);

            srcColorY[i] = color.mY;
            srcColorVU[i] = color.mVU;
            metadataColor[i] = color.mMetadata;

            srcMonoY[i] = mono.mY;
            metadataMono[i] = mono.mMetadata;
        }

        // Output buffers sized from the color reference; strides reused for output.
        Plane[] colorPlanes = mRefColorImage.getPlanes();
        Plane[] monoPlanes = mRefMonoImage.getPlanes();
        ByteBuffer dstY = ByteBuffer.allocateDirect(colorPlanes[Y_PLANE]
                .getBuffer().capacity());
        ByteBuffer dstVU = ByteBuffer.allocateDirect(colorPlanes[VU_PLANE]
                .getBuffer().capacity());
        // Filled by native code as {left, top, width, height}.
        int[] roiRect = new int[4];

        Log.e(TAG, "processImage - refImage size - y: "
                + colorPlanes[Y_PLANE].getBuffer().capacity()
                + " vu: " + colorPlanes[VU_PLANE].getBuffer().capacity());

        Log.e(TAG, "processImage - dst size - y: "
                + dstY.capacity() + " vu: " + dstVU.capacity());

        boolean result = nativeClearSightProcess(numImages, srcColorY, srcColorVU,
                metadataColor, mRefColorImage.getWidth(),
                mRefColorImage.getHeight(),
                colorPlanes[Y_PLANE].getRowStride(),
                colorPlanes[VU_PLANE].getRowStride(), srcMonoY, metadataMono,
                mRefMonoImage.getWidth(), mRefMonoImage.getHeight(),
                monoPlanes[Y_PLANE].getRowStride(), otp_calib.getBytes(), dstY, dstVU,
                colorPlanes[Y_PLANE].getRowStride(),
                colorPlanes[VU_PLANE].getRowStride(), roiRect);

        if (result) {
            // Concatenate Y then VU into a single NV21 byte array.
            dstY.rewind();
            dstVU.rewind();
            byte[] data = new byte[dstY.capacity() + dstVU.capacity()];
            int[] strides = new int[] { colorPlanes[Y_PLANE].getRowStride(),
                    colorPlanes[VU_PLANE].getRowStride() };
            dstY.get(data, 0, dstY.capacity());
            dstVU.get(data, dstY.capacity(), dstVU.capacity());
            return new ClearsightImage(new YuvImage(data, ImageFormat.NV21,
                    mRefColorImage.getWidth(), mRefColorImage.getHeight(),
                    strides), roiRect);
        } else {
            return null;
        }
    }
+
    /** Native: tune brightness/sharpen intensities (not called from this class yet). */
    private native final boolean nativeConfigureClearSight(float brIntensity, float sharpenIntensity);

    /**
     * Native: align one source frame against the reference Y plane, writing the
     * registered planes into dstY/dstVU and per-frame metadata into metadata.
     */
    private native final boolean nativeClearSightRegisterImage(ByteBuffer refY,
            ByteBuffer srcY, ByteBuffer srcVU, int width, int height,
            int strideY, int strideVU, ByteBuffer dstY, ByteBuffer dstVU,
            int[] metadata);

    /**
     * Native: fuse the registered color/mono frame sets into dstY/dstVU and
     * report the result region of interest in roiRect {left, top, width, height}.
     */
    private native final boolean nativeClearSightProcess(int numImagePairs,
            ByteBuffer[] srcColorY, ByteBuffer[] srcColorVU,
            int[][] metadataColor, int srcColorWidth, int srcColorHeight,
            int srcColorStrideY, int srcColorStrideVU, ByteBuffer[] srcMonoY,
            int[][] metadataMono, int srcMonoWidth, int srcMonoHeight,
            int srcMonoStrideY, byte[] otp, ByteBuffer dstY, ByteBuffer dstVU,
            int dstStrideY, int dstStrideVU, int[] roiRect);
+
+ private class SourceImage {
+ ByteBuffer mY;
+ ByteBuffer mVU;
+ int[] mMetadata;
+
+ SourceImage(ByteBuffer y, ByteBuffer vu, int[] metadata) {
+ mY = y;
+ mVU = vu;
+ mMetadata = metadata;
+ }
+ }
+
+ public static class ClearsightImage {
+ private YuvImage mImage;
+ private Rect mRoiRect;
+
+ ClearsightImage(YuvImage image, int[] rect) {
+ mImage = image;
+ mRoiRect = new Rect(rect[0], rect[1], rect[0] + rect[2], rect[1]
+ + rect[3]);
+ }
+
+ public Rect getRoiRect() {
+ return mRoiRect;
+ }
+
+ public long getDataLength() {
+ return (mImage==null?0:mImage.getYuvData().length);
+ }
+
+ public int getWidth() {
+ return (mRoiRect.right - mRoiRect.left);
+ }
+
+ public int getHeight() {
+ return (mRoiRect.bottom - mRoiRect.top);
+ }
+
+ public byte[] compressToJpeg() {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+
+ mImage.compressToJpeg(mRoiRect, 100, baos);
+ return baos.toByteArray();
+ }
+ }
+}