author     Cullum Baldwin <cullumb@codeaurora.org>    2016-04-12 11:49:50 -0700
committer  Steve Kondik <steve@cyngn.com>             2016-08-25 21:55:20 -0700
commit     760a77ec5d8daaa1cbcbdb02750b72e38e671652 (patch)
tree       d360f1611318ab81d9b22d435b14bef5942a85bd
parent     98ed4bd0c2d895fa4ef7d531200b876b3f04e61c (diff)
SnapdragonCamera: Add support for ClearSight capture

Add support to capture ClearSight images.

CRs-Fixed: 993611
Change-Id: Ifaeafa53ee5d095fe322e48f968cdeec03c64077
-rw-r--r--  res/values/qcomarrays.xml                                  10
-rw-r--r--  res/values/qcomstrings.xml                                 11
-rw-r--r--  res/xml/camera_preferences.xml                             18
-rw-r--r--  src/com/android/camera/CameraSettings.java                  1
-rw-r--r--  src/com/android/camera/CaptureMenu.java                     9
-rw-r--r--  src/com/android/camera/CaptureModule.java                 812
-rw-r--r--  src/com/android/camera/MediaSaveService.java              127
-rw-r--r--  src/com/android/camera/util/ClearSightNativeEngine.java   351
8 files changed, 1234 insertions(+), 105 deletions(-)
diff --git a/res/values/qcomarrays.xml b/res/values/qcomarrays.xml
index b80d0fdc9..3772e1729 100644
--- a/res/values/qcomarrays.xml
+++ b/res/values/qcomarrays.xml
@@ -855,4 +855,14 @@
<item>@string/pref_camera_mono_preview_value_on</item>
<item>@string/pref_camera_mono_preview_value_off</item>
</string-array>
+
+ <string-array name="pref_camera_clearsight_entries" translatable="true">
+ <item>@string/pref_camera_clearsight_entry_on</item>
+ <item>@string/pref_camera_clearsight_entry_off</item>
+ </string-array>
+
+ <string-array name="pref_camera_clearsight_entryvalues" translatable="false">
+ <item>@string/pref_camera_clearsight_value_on</item>
+ <item>@string/pref_camera_clearsight_value_off</item>
+ </string-array>
</resources>
diff --git a/res/values/qcomstrings.xml b/res/values/qcomstrings.xml
index d8b6b6c1a..b0c84b790 100644
--- a/res/values/qcomstrings.xml
+++ b/res/values/qcomstrings.xml
@@ -559,4 +559,15 @@
<string name="pref_camera_mono_preview_value_on">on</string>
<string name="pref_camera_mono_preview_value_off">off</string>
+ <string name="pref_camera_clearsight_title">ClearSight</string>
+ <string name="pref_camera_clearsight_default" translatable="false">off</string>
+ <string name="pref_camera_clearsight_entry_on">On</string>
+ <string name="pref_camera_clearsight_entry_off">Off</string>
+
+ <string name="pref_camera_clearsight_value_on" translatable="false">on</string>
+ <string name="pref_camera_clearsight_value_off" translatable="false">off</string>
+
+ <string name="clearsight_capture_success">ClearSight capture successful</string>
+ <string name="clearsight_capture_fail">ClearSight capture failed</string>
+
</resources>
diff --git a/res/xml/camera_preferences.xml b/res/xml/camera_preferences.xml
index e2d749f8c..0507472f7 100644
--- a/res/xml/camera_preferences.xml
+++ b/res/xml/camera_preferences.xml
@@ -379,9 +379,17 @@
camera:entryValues="@array/pref_camera_dual_camera_entryvalues" />
<ListPreference
- camera:key="pref_camera_mono_preview_key"
- camera:defaultValue="@string/pref_camera_mono_preview_default"
- camera:title="@string/pref_camera_mono_preview_title"
- camera:entries="@array/pref_camera_mono_preview_entries"
- camera:entryValues="@array/pref_camera_mono_preview_entryvalues" />
+ camera:defaultValue="@string/pref_camera_mono_preview_default"
+ camera:entries="@array/pref_camera_mono_preview_entries"
+ camera:entryValues="@array/pref_camera_mono_preview_entryvalues"
+ camera:key="pref_camera_mono_preview_key"
+ camera:title="@string/pref_camera_mono_preview_title" />
+
+ <ListPreference
+ camera:defaultValue="@string/pref_camera_clearsight_default"
+ camera:entries="@array/pref_camera_clearsight_entries"
+ camera:entryValues="@array/pref_camera_clearsight_entryvalues"
+ camera:key="pref_camera_clearsight_key"
+ camera:title="@string/pref_camera_clearsight_title" />
+
</PreferenceGroup>
diff --git a/src/com/android/camera/CameraSettings.java b/src/com/android/camera/CameraSettings.java
index f2058ab66..a7fade722 100644
--- a/src/com/android/camera/CameraSettings.java
+++ b/src/com/android/camera/CameraSettings.java
@@ -246,6 +246,7 @@ public class CameraSettings {
public static final String KEY_CAMERA2 = "pref_camera_camera2_key";
public static final String KEY_DUAL_CAMERA = "pref_camera_dual_camera_key";
public static final String KEY_MONO_PREVIEW = "pref_camera_mono_preview_key";
+ public static final String KEY_CLEARSIGHT = "pref_camera_clearsight_key";
public static final String KEY_SELFIE_FLASH = "pref_selfie_flash_key";
diff --git a/src/com/android/camera/CaptureMenu.java b/src/com/android/camera/CaptureMenu.java
index 99f919a79..8b8f5c357 100644
--- a/src/com/android/camera/CaptureMenu.java
+++ b/src/com/android/camera/CaptureMenu.java
@@ -97,7 +97,8 @@ public class CaptureMenu extends MenuController
CameraSettings.KEY_CAMERA_SAVEPATH,
CameraSettings.KEY_WHITE_BALANCE,
CameraSettings.KEY_CAMERA2,
- CameraSettings.KEY_DUAL_CAMERA
+ CameraSettings.KEY_DUAL_CAMERA,
+ CameraSettings.KEY_CLEARSIGHT
};
//Todo: 2nd string to contain only developer settings
@@ -109,6 +110,7 @@ public class CaptureMenu extends MenuController
CameraSettings.KEY_WHITE_BALANCE,
CameraSettings.KEY_CAMERA2,
CameraSettings.KEY_DUAL_CAMERA,
+ CameraSettings.KEY_CLEARSIGHT,
CameraSettings.KEY_MONO_PREVIEW
};
@@ -398,6 +400,8 @@ public class CaptureMenu extends MenuController
if (!pref.getValue().equals("dual")) {
setPreference(CameraSettings.KEY_MONO_PREVIEW, "off");
mListMenu.setPreferenceEnabled(CameraSettings.KEY_MONO_PREVIEW, false);
+ setPreference(CameraSettings.KEY_CLEARSIGHT, "off");
+ mListMenu.setPreferenceEnabled(CameraSettings.KEY_CLEARSIGHT, false);
}
if (mListener != null) {
@@ -555,6 +559,9 @@ public class CaptureMenu extends MenuController
if (value.equals("on")) {
} else if (value.equals("off")) {
}
+ } else if (key.equals(CameraSettings.KEY_CLEARSIGHT)) {
+ // restart module to re-create sessions and callbacks
+ mActivity.onModuleSelected(ModuleSwitcher.CAPTURE_MODULE_INDEX);
}
}
diff --git a/src/com/android/camera/CaptureModule.java b/src/com/android/camera/CaptureModule.java
index 52f9327e6..382e691b1 100644
--- a/src/com/android/camera/CaptureModule.java
+++ b/src/com/android/camera/CaptureModule.java
@@ -19,8 +19,22 @@
package com.android.camera;
-import android.annotation.TargetApi;
-import android.app.Activity;
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayDeque;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Semaphore;
+import java.util.concurrent.TimeUnit;
+
+import org.codeaurora.snapcam.R;
+
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
@@ -28,20 +42,25 @@ import android.content.res.Configuration;
import android.graphics.ImageFormat;
import android.graphics.Point;
import android.graphics.Rect;
+import android.graphics.YuvImage;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
+import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
+import android.hardware.camera2.params.InputConfiguration;
import android.hardware.camera2.params.MeteringRectangle;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.CameraProfile;
import android.media.Image;
+import android.media.Image.Plane;
import android.media.ImageReader;
+import android.media.ImageWriter;
import android.net.Uri;
import android.os.Handler;
import android.os.HandlerThread;
@@ -61,20 +80,7 @@ import com.android.camera.PhotoModule.NamedImages;
import com.android.camera.PhotoModule.NamedImages.NamedEntity;
import com.android.camera.ui.RotateTextToast;
import com.android.camera.util.CameraUtil;
-
-import org.codeaurora.snapcam.R;
-
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.Semaphore;
-import java.util.concurrent.TimeUnit;
+import com.android.camera.util.ClearSightNativeEngine;
public class CaptureModule implements CameraModule, PhotoController,
MediaSaveService.Listener {
@@ -83,6 +89,8 @@ public class CaptureModule implements CameraModule, PhotoController,
public static final int MONO_MODE = 2;
private static final int OPEN_CAMERA = 0;
private static final int MAX_NUM_CAM = 3;
+ private static final long TIMESTAMP_THRESHOLD_NS = 10*1000000; // 10 ms
+
/**
* Conversion from screen rotation to JPEG orientation.
*/
@@ -113,6 +121,13 @@ public class CaptureModule implements CameraModule, PhotoController,
private static final String TAG = "SnapCam_CaptureModule";
private static int MODE = DUAL_MODE;
+ private static final int MSG_START_CAPTURE = 0;
+ private static final int MSG_NEW_IMG = 1;
+ private static final int MSG_NEW_RESULT = 2;
+ private static final int MSG_SAVE = 4;
+
+ private static final int NUM_IMAGES_TO_BURST = 4;
+
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
@@ -130,6 +145,11 @@ public class CaptureModule implements CameraModule, PhotoController,
CaptureRequest.Key<Integer> BayerMonoLinkSessionIdKey =
new CaptureRequest.Key<>("org.codeaurora.qcamera3.dualcam_link_meta_data" +
".related_camera_id", Integer.class);
+
+ CaptureResult.Key<Byte> OTP_CALIB_BLOB =
+ new CaptureResult.Key<>("org.codeaurora.qcamera3.dualcam_calib_meta_data.dualcam_calib_meta_data_blob",
+ Byte.class);
+
private int mLastResultAFState = -1;
private Rect[] mCropRegion = new Rect[MAX_NUM_CAM];
private boolean mAutoFocusSupported;
@@ -165,16 +185,24 @@ public class CaptureModule implements CameraModule, PhotoController,
private HandlerThread mCameraThread;
private HandlerThread mImageAvailableThread;
private HandlerThread mCallbackThread;
+ private HandlerThread mImageProcessThread;
+ private HandlerThread mImageReprocessThread;
+
/**
* A {@link Handler} for running tasks in the background.
*/
private Handler mCameraHandler;
private Handler mImageAvailableHandler;
private Handler mCallbackHandler;
+ private ImageProcessHandler mImageProcessHandler;
+ private ImageReprocessHandler mImageReprocessHandler;
+
/**
* An {@link ImageReader} that handles still image capture.
*/
private ImageReader[] mImageReader = new ImageReader[MAX_NUM_CAM];
+ private ImageReader[] mReprocessImageReader = new ImageReader[MAX_NUM_CAM];
+ private ImageWriter[] mImageWriter = new ImageWriter[MAX_NUM_CAM];
private NamedImages mNamedImages;
private ContentResolver mContentResolver;
private MediaSaveService.OnMediaSavedListener mOnMediaSavedListener =
@@ -186,35 +214,472 @@ public class CaptureModule implements CameraModule, PhotoController,
}
}
};
- /**
- * This a callback object for the {@link ImageReader}. "onImageAvailable" will be called when a
- * still image is ready to be saved.
- */
- private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
- = new ImageReader.OnImageAvailableListener() {
+
+ private class ReprocessableImage {
+ final Image mImage;
+ final TotalCaptureResult mCaptureResult;
+
+ ReprocessableImage(Image image, TotalCaptureResult result) {
+ mImage = image;
+ mCaptureResult = result;
+ }
+ }
+
+ private abstract class ImageAvailableListener implements ImageReader.OnImageAvailableListener {
+ int mCamId;
+
+ ImageAvailableListener(int cameraId) {
+ mCamId = cameraId;
+ }
+ }
+
+ private abstract class CameraCaptureCallback extends CameraCaptureSession.CaptureCallback {
+ int mCamId;
+
+ CameraCaptureCallback(int cameraId) {
+ mCamId = cameraId;
+ }
+ }
+
+ private class ImageProcessHandler extends Handler {
+ private ArrayDeque<ReprocessableImage> mBayerFrames = new ArrayDeque<ReprocessableImage>(
+ NUM_IMAGES_TO_BURST);
+ private ArrayDeque<ReprocessableImage> mMonoFrames = new ArrayDeque<ReprocessableImage>(
+ NUM_IMAGES_TO_BURST);
+ private ArrayDeque<TotalCaptureResult> mBayerCaptureResults = new ArrayDeque<TotalCaptureResult>(
+ NUM_IMAGES_TO_BURST);
+ private ArrayDeque<TotalCaptureResult> mMonoCaptureResults = new ArrayDeque<TotalCaptureResult>(
+ NUM_IMAGES_TO_BURST);
+ private ArrayDeque<Image> mBayerImages = new ArrayDeque<Image>(
+ NUM_IMAGES_TO_BURST);
+ private ArrayDeque<Image> mMonoImages = new ArrayDeque<Image>(
+ NUM_IMAGES_TO_BURST);
+ private int[] mNumImagesToProcess = new int[MAX_NUM_CAM];
+
+ public ImageProcessHandler(Looper looper) {
+ super(looper);
+ }
@Override
- public void onImageAvailable(ImageReader reader) {
- Log.d(TAG, "image available");
+ public void handleMessage(Message msg) {
+ switch (msg.what) {
+ case MSG_START_CAPTURE:
+ mNumImagesToProcess[msg.arg1] = msg.arg2;
+ break;
+ case MSG_NEW_IMG:
+ processNewImg(msg);
+ break;
+ case MSG_NEW_RESULT:
+ processNewCaptureResult(msg);
+ break;
+ case MSG_SAVE:
+ processSaveImage(msg);
+ break;
+ }
+ }
+
+ private void processNewImg(Message msg) {
+ Image image = (Image) msg.obj;
+
+ ArrayDeque<Image> imageQueue;
+ ArrayDeque<TotalCaptureResult> resultQueue;
+ ArrayDeque<ReprocessableImage> reprocQueue;
+ // push image onto queue
+ if (msg.arg1 == BAYER_ID) {
+ imageQueue = mBayerImages;
+ resultQueue = mBayerCaptureResults;
+ reprocQueue = mBayerFrames;
+ } else {
+ imageQueue = mMonoImages;
+ resultQueue = mMonoCaptureResults;
+ reprocQueue = mMonoFrames;
+ }
+
+ imageQueue.add(image);
+
+ Log.d(TAG, "processNewImg - cam: " + msg.arg1 + " num imgs: "
+ + imageQueue.size() + " num results: " + resultQueue.size());
+ if (imageQueue.isEmpty() == resultQueue.isEmpty()) {
+ Image headImage = imageQueue.poll();
+ TotalCaptureResult headResult = resultQueue.poll();
+ reprocQueue.add(new ReprocessableImage(headImage, headResult));
+ checkForValidFramePair();
+ mNumImagesToProcess[msg.arg1]--;
+ if (mNumImagesToProcess[BAYER_ID] == 0
+ && mNumImagesToProcess[MONO_ID] == 0) {
+ ClearSightNativeEngine.getInstance().reset();
+ processReprocess();
+ }
+ }
+ }
+
+ private void processNewCaptureResult(Message msg) {
+ if (msg.arg2 == 1) {
+ // capture failed
+ mNumImagesToProcess[msg.arg1]--;
+ } else {
+ TotalCaptureResult result = (TotalCaptureResult) msg.obj;
+ ArrayDeque<Image> imageQueue;
+ ArrayDeque<TotalCaptureResult> resultQueue;
+ ArrayDeque<ReprocessableImage> reprocQueue;
+ // push image onto queue
+ if (msg.arg1 == BAYER_ID) {
+ imageQueue = mBayerImages;
+ resultQueue = mBayerCaptureResults;
+ reprocQueue = mBayerFrames;
+ } else {
+ imageQueue = mMonoImages;
+ resultQueue = mMonoCaptureResults;
+ reprocQueue = mMonoFrames;
+ }
+
+ resultQueue.add(result);
+
+ Log.d(TAG, "processNewCaptureResult - cam: " + msg.arg1
+ + " num imgs: " + imageQueue.size() + " num results: "
+ + resultQueue.size());
+ if (imageQueue.isEmpty() == resultQueue.isEmpty()) {
+ Image headImage = imageQueue.poll();
+ TotalCaptureResult headResult = resultQueue.poll();
+ reprocQueue.add(new ReprocessableImage(headImage,
+ headResult));
+ checkForValidFramePair();
+ mNumImagesToProcess[msg.arg1]--;
+ if (mNumImagesToProcess[BAYER_ID] == 0
+ && mNumImagesToProcess[MONO_ID] == 0) {
+ ClearSightNativeEngine.getInstance().reset();
+ processReprocess();
+ }
+ }
+ }
+ }
+
+ private void checkForValidFramePair() {
+ // if we have images from both
+ // as we just added an image onto one of the queues
+ // this condition is only true when both are not empty
+ Log.d(TAG,
+ "checkForValidFramePair - num bayer frames: "
+ + mBayerFrames.size() + " num mono frames: "
+ + mMonoFrames.size());
+
+ if (mBayerFrames.isEmpty() == mMonoFrames.isEmpty()) {
+ // peek oldest pair of images
+ ReprocessableImage bayer = mBayerFrames.peek();
+ ReprocessableImage mono = mMonoFrames.peek();
+
+ Log.d(TAG,
+ "checkForValidFramePair - bayer ts: "
+ + bayer.mImage.getTimestamp() + " mono ts: "
+ + mono.mImage.getTimestamp());
+ Log.d(TAG,
+ "checkForValidFramePair - difference: "
+ + Math.abs(bayer.mImage.getTimestamp()
+ - mono.mImage.getTimestamp()));
+ // if timestamps are within threshold, keep frames
+ if (Math.abs(bayer.mImage.getTimestamp()
+ - mono.mImage.getTimestamp()) > TIMESTAMP_THRESHOLD_NS) {
+ Log.d(TAG, "checkForValidFramePair - toss pair");
+ // no match, toss
+ bayer = mBayerFrames.poll();
+ mono = mMonoFrames.poll();
+ bayer.mImage.close();
+ mono.mImage.close();
+ }
+ }
+ }
+
+ private void releaseBayerFrames() {
+ for (ReprocessableImage reprocImg : mBayerFrames) {
+ reprocImg.mImage.close();
+ }
+
+ mBayerFrames.clear();
+ }
+
+ private void releaseMonoFrames() {
+ for (ReprocessableImage reprocImg : mMonoFrames) {
+ reprocImg.mImage.close();
+ }
+
+ mMonoFrames.clear();
+ }
+
+ private void processReprocess() {
+ if (mBayerFrames.size() != mMonoFrames.size()
+ || mBayerFrames.isEmpty()) {
+ Log.d(TAG, "processReprocess - frame size mismatch or empty");
+ releaseBayerFrames();
+ releaseMonoFrames();
+ mActivity.runOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ RotateTextToast.makeText(mActivity, R.string.clearsight_capture_fail,
+ Toast.LENGTH_SHORT).show();
+ unlockFocus(BAYER_ID);
+ unlockFocus(MONO_ID);
+ }
+ });
+ return;
+ } else {
+ sendReprocessRequests(BAYER_ID);
+ sendReprocessRequests(MONO_ID);
+ }
+ }
+
+ private void sendReprocessRequests(final int camId) {
+ try {
+ ArrayDeque<ReprocessableImage> frameQueue;
+ if (camId == BAYER_ID) {
+ frameQueue = mBayerFrames;
+ } else {
+ frameQueue = mMonoFrames;
+ }
+ Log.d(TAG, "sendReprocessRequests - start cam: " + camId
+ + " num frames: " + frameQueue.size());
+
+ ArrayList<CaptureRequest> reprocRequests = new ArrayList<CaptureRequest>(
+ frameQueue.size());
+ while (!frameQueue.isEmpty()) {
+ ReprocessableImage reprocImg = frameQueue.poll();
+
+ CaptureRequest.Builder reprocRequest = mCameraDevice[camId]
+ .createReprocessCaptureRequest(reprocImg.mCaptureResult);
+ reprocRequest.addTarget(mReprocessImageReader[camId]
+ .getSurface());
+ reprocRequests.add(reprocRequest.build());
+
+ mImageWriter[camId].queueInputImage(reprocImg.mImage);
+ }
+
+ mImageReprocessHandler.obtainMessage(MSG_START_CAPTURE, camId,
+ reprocRequests.size()).sendToTarget();
+ mCaptureSession[camId].captureBurst(reprocRequests,
+ new CameraCaptureCallback(camId) {
+ @Override
+ public void onCaptureCompleted(
+ CameraCaptureSession session,
+ CaptureRequest request,
+ TotalCaptureResult result) {
+ super.onCaptureCompleted(session, request, result);
+ Log.d(TAG, "reprocess - onCaptureCompleted: "
+ + mCamId);
+ // TODO: parse OTP Calib data to be used in final CS
+ // result.get(OTP_CALIB_BLOB);
+ }
+
+ @Override
+ public void onCaptureFailed(
+ CameraCaptureSession session,
+ CaptureRequest request,
+ CaptureFailure failure) {
+ super.onCaptureFailed(session, request, failure);
+ Log.d(TAG, "reprocess - onCaptureFailed: "
+ + mCamId);
+ mImageReprocessHandler.obtainMessage(
+ MSG_NEW_RESULT, mCamId, 1)
+ .sendToTarget();
+ }
+ }, mCameraHandler);
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+
+ private void processSaveImage(Message msg) {
+ Image image = (Image) msg.obj;
+ if (image.getFormat() == ImageFormat.JPEG) {
+ mCaptureStartTime = System.currentTimeMillis();
+ mNamedImages.nameNewImage(mCaptureStartTime);
+ NamedEntity name = mNamedImages.getNextNameEntity();
+ String title = (name == null) ? null : name.title;
+ long date = (name == null) ? -1 : name.date;
+
+ ByteBuffer buffer = image.getPlanes()[0].getBuffer();
+ byte[] bytes = new byte[buffer.remaining()];
+ buffer.get(bytes);
+
+ mActivity.getMediaSaveService().addImage(bytes, title, date,
+ null, image.getWidth(), image.getHeight(), 0, null,
+ mOnMediaSavedListener, mContentResolver, "jpeg");
+ } else {
+ Log.w(TAG, "processSaveImage - image format incorrect: " + image.getFormat());
+ }
+ image.close();
+ }
+ };
+
+ private class ImageReprocessHandler extends Handler {
+ private int[] mNumImagesToProcess = new int[MAX_NUM_CAM];
+
+ public ImageReprocessHandler(Looper looper) {
+ super(looper);
+ }
+
+ @Override
+ public void handleMessage(Message msg) {
+ switch (msg.what) {
+ case MSG_START_CAPTURE:
+ mNumImagesToProcess[msg.arg1] = msg.arg2;
+ break;
+ case MSG_NEW_IMG:
+ processNewImg(msg);
+ break;
+ case MSG_NEW_RESULT:
+ processNewCaptureResult(msg);
+ break;
+ }
+ }
+
+ private void processNewImg(Message msg) {
+ Image image = (Image) msg.obj;
+ boolean isBayer = (msg.arg1 == BAYER_ID);
+
+ Log.d(TAG, "reprocess - processNewImg");
+ if (!ClearSightNativeEngine.getInstance()
+ .hasReferenceImage(isBayer)) {
+ // reference not yet set
+ ClearSightNativeEngine.getInstance().setReferenceImage(isBayer,
+ image);
+ } else {
+ // if ref images set, register this image
+ ClearSightNativeEngine.getInstance().registerImage(isBayer,
+ image);
+ }
+
+ mNumImagesToProcess[msg.arg1]--;
+
+ Log.d(TAG, "reprocess - processNewImg, cam: " + msg.arg1
+ + " count: " + mNumImagesToProcess[msg.arg1]);
+
+ if (mNumImagesToProcess[BAYER_ID] == 0
+ && mNumImagesToProcess[MONO_ID] == 0) {
+ processClearSight();
+ }
+ }
+
+ private void processNewCaptureResult(Message msg) {
+ if (msg.arg2 == 1) {
+ // capture failed
+ mNumImagesToProcess[msg.arg1]--;
+ }
+
+ Log.d(TAG, "reprocess - processNewCaptureResult, cam: " + msg.arg1
+ + " count: " + mNumImagesToProcess[msg.arg1]);
+
+ if (mNumImagesToProcess[BAYER_ID] == 0
+ && mNumImagesToProcess[MONO_ID] == 0) {
+ processClearSight();
+ }
+ }
+
+ private void processClearSight() {
+ Log.d(TAG, "reprocess - processClearSight, bayercount: "
+ + mNumImagesToProcess[BAYER_ID] + " mono count: "
+ + mNumImagesToProcess[MONO_ID]);
+
mCaptureStartTime = System.currentTimeMillis();
mNamedImages.nameNewImage(mCaptureStartTime);
NamedEntity name = mNamedImages.getNextNameEntity();
String title = (name == null) ? null : name.title;
long date = (name == null) ? -1 : name.date;
- Image mImage = reader.acquireNextImage();
- ByteBuffer buffer = mImage.getPlanes()[0].getBuffer();
- //Todo: dont create new buffer and use the one from ImageReader
- byte[] bytes = new byte[buffer.remaining()];
- buffer.get(bytes);
+ ClearSightNativeEngine.ClearsightImage csImage = ClearSightNativeEngine
+ .getInstance().processImage();
+ if (csImage != null) {
+ Log.d(TAG, "reprocess - processClearSight success");
+
+ mActivity.getMediaSaveService().addMpoImage(
+ csImage,
+ createYuvImage(ClearSightNativeEngine.getInstance()
+ .getReferenceImage(true)),
+ createYuvImage(ClearSightNativeEngine.getInstance()
+ .getReferenceImage(false)), null, null, title,
+ date, null, 0, mOnMediaSavedListener, mContentResolver,
+ "jpeg");
- mActivity.getMediaSaveService().addImage(
- bytes, title, date, null, reader.getWidth(), reader.getHeight(),
- 0, null, mOnMediaSavedListener, mContentResolver, "jpeg");
- mImage.close();
+ mActivity.runOnUiThread(new Runnable() {
+ @Override
+ public void run() {
+ RotateTextToast.makeText(mActivity, R.string.clearsight_capture_success,
+ Toast.LENGTH_SHORT).show();
+ }
+ });
+ } else {
+ Log.d(TAG, "reprocess - processClearSight fail");
+ Image bayerRef = ClearSightNativeEngine.getInstance()
+ .getReferenceImage(true);
+ Image monoRef = ClearSightNativeEngine.getInstance()
+ .getReferenceImage(false);
+ if (bayerRef != null && monoRef != null) {
+ Log.d(TAG, "reprocess - saving with bayer + mono mpo");
+ mActivity.getMediaSaveService().addMpoImage(null,
+ createYuvImage(bayerRef), createYuvImage(monoRef),
+ null, null, title, date, null, 0,
+ mOnMediaSavedListener, mContentResolver, "jpeg");
+ } else {
+ Log.d(TAG, "reprocess - bayer + mono images not available");
+ }
}
+ unlockFocus(BAYER_ID);
+ unlockFocus(MONO_ID);
+ ClearSightNativeEngine.getInstance().reset();
+ }
};
+
+ private void saveDebugImage(byte[] data, int width, int height,
+ boolean isReproc) {
+ mCaptureStartTime = System.currentTimeMillis();
+ mNamedImages.nameNewImage(mCaptureStartTime);
+ NamedEntity name = mNamedImages.getNextNameEntity();
+ String title = (name == null) ? null : name.title;
+ long date = (name == null) ? -1 : name.date;
+
+ if (isReproc) {
+ title += "_reproc";
+ }
+
+ mActivity.getMediaSaveService().addImage(data, title, date, null,
+ width, height, 0, null, mOnMediaSavedListener,
+ mContentResolver, "jpeg");
+ }
+
+ private void saveDebugImage(YuvImage image, boolean isReproc) {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ image.compressToJpeg(
+ new Rect(0, 0, image.getWidth(), image.getHeight()), 100, baos);
+
+ saveDebugImage(baos.toByteArray(), image.getWidth(), image.getHeight(),
+ isReproc);
+ }
+
+ private void saveDebugImage(Image image, boolean isReproc) {
+ saveDebugImage(createYuvImage(image), isReproc);
+ }
+
+ private YuvImage createYuvImage(Image image) {
+ if (image == null) {
+ Log.d(TAG, "createYuvImage - invalid param");
+ return null;
+ }
+ Plane[] planes = image.getPlanes();
+ ByteBuffer yBuffer = planes[0].getBuffer();
+ ByteBuffer vuBuffer = planes[2].getBuffer();
+ int sizeY = yBuffer.capacity();
+ int sizeVU = vuBuffer.capacity();
+ byte[] data = new byte[sizeY + sizeVU];
+ yBuffer.rewind();
+ yBuffer.get(data, 0, sizeY);
+ vuBuffer.rewind();
+ vuBuffer.get(data, sizeY, sizeVU);
+ int[] strides = new int[] { planes[0].getRowStride(),
+ planes[2].getRowStride() };
+
+ return new YuvImage(data, ImageFormat.NV21, image.getWidth(),
+ image.getHeight(), strides);
+ }
+
/**
* {@link CaptureRequest.Builder} for the camera preview
*/
@@ -417,6 +882,7 @@ public class CaptureModule implements CameraModule, PhotoController,
switch (value) {
case "dual":
mode = DUAL_MODE;
+ ClearSightNativeEngine.createInstance();
break;
case "bayer":
mode = BAYER_MODE;
@@ -518,40 +984,62 @@ public class CaptureModule implements CameraModule, PhotoController,
mPreviewRequestBuilder[id].setTag(id);
mPreviewRequestBuilder[id].addTarget(surface);
- list.add(surface);
- list.add(mImageReader[id].getSurface());
- // Here, we create a CameraCaptureSession for camera preview.
- mCameraDevice[id].createCaptureSession(list,
+ CameraCaptureSession.StateCallback captureSessionCallback =
new CameraCaptureSession.StateCallback() {
- @Override
- public void onConfigured(CameraCaptureSession cameraCaptureSession) {
- // The camera is already closed
- if (mPaused || null == mCameraDevice[id]) {
- return;
- }
- // When the session is ready, we start displaying the preview.
- mCaptureSession[id] = cameraCaptureSession;
- initializePreviewConfiguration(id);
- try {
- if (MODE == DUAL_MODE) {
- linkBayerMono(id);
- mIsLinked = true;
- }
- // Finally, we start displaying the camera preview.
- mCaptureSession[id].setRepeatingRequest(mPreviewRequestBuilder[id]
- .build(), mCaptureCallback, mCameraHandler);
- } catch (CameraAccessException e) {
- e.printStackTrace();
- }
+ @Override
+ public void onConfigured(CameraCaptureSession cameraCaptureSession) {
+ // The camera is already closed
+ if (mPaused || null == mCameraDevice[id]) {
+ return;
+ }
+ // When the session is ready, we start displaying the preview.
+ mCaptureSession[id] = cameraCaptureSession;
+ initializePreviewConfiguration(id);
+ try {
+ if (MODE == DUAL_MODE) {
+ linkBayerMono(id);
+ mIsLinked = true;
}
+ // Finally, we start displaying the camera preview.
+ mCaptureSession[id].setRepeatingRequest(mPreviewRequestBuilder[id]
+ .build(), mCaptureCallback, mCameraHandler);
- @Override
- public void onConfigureFailed(
- CameraCaptureSession cameraCaptureSession) {
+ // For Clearsight
+ if(mCaptureSession[id].isReprocessable()) {
+ mImageWriter[id] = ImageWriter.newInstance(cameraCaptureSession.getInputSurface(), NUM_IMAGES_TO_BURST);
}
- }, null
- );
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+
+ @Override
+ public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
+ Log.d(TAG, "cameracapturesession - onConfigureFailed");
+ }
+
+ @Override
+ public void onClosed(CameraCaptureSession session) {
+ Log.d(TAG, "cameracapturesession - onClosed");
+ }
+ };
+
+ list.add(surface);
+ list.add(mImageReader[id].getSurface());
+
+ ListPreference pref = mPreferenceGroup.findPreference(CameraSettings.KEY_CLEARSIGHT);
+ if(pref.getValue().equals(mActivity.getString(R.string.pref_camera_clearsight_value_on))) {
+ list.add(mReprocessImageReader[id].getSurface());
+ // Here, we create a CameraCaptureSession for camera preview.
+ mCameraDevice[id].createReprocessableCaptureSession(
+ new InputConfiguration(mImageReader[id].getWidth(),
+ mImageReader[id].getHeight(), mImageReader[id].getImageFormat()),
+ list, captureSessionCallback, null);
+ } else {
+ // Here, we create a CameraCaptureSession for camera preview.
+ mCameraDevice[id].createCaptureSession(list, captureSessionCallback, null);
+ }
} catch (CameraAccessException e) {
}
}
@@ -687,40 +1175,98 @@ public class CaptureModule implements CameraModule, PhotoController,
if (null == mActivity || null == mCameraDevice[id]) {
return;
}
- CaptureRequest.Builder captureBuilder =
- mCameraDevice[id].createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
+
+ ListPreference pref = mPreferenceGroup.findPreference(CameraSettings.KEY_CLEARSIGHT);
+ final boolean csEnabled = pref.getValue().equals(
+ mActivity.getString(R.string.pref_camera_clearsight_value_on));
+ CaptureRequest.Builder captureBuilder;
+ CameraCaptureSession.CaptureCallback captureCallback;
+
+ if(csEnabled) {
+ captureBuilder = mCameraDevice[id].createCaptureRequest(CameraDevice.TEMPLATE_ZERO_SHUTTER_LAG);
+
+ // Orientation
+ // int rotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
+ // captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
+
+ captureCallback = new CameraCaptureSession.CaptureCallback() {
+
+ @Override
+ public void onCaptureCompleted(CameraCaptureSession session,
+ CaptureRequest request,
+ TotalCaptureResult result) {
+ Log.d(TAG, "captureStillPicture onCaptureCompleted: " + id);
+ result.dumpToLog();
+ mImageProcessHandler.obtainMessage(MSG_NEW_RESULT,
+ id, 0, result).sendToTarget();
+ }
+
+ @Override
+ public void onCaptureFailed(CameraCaptureSession session,
+ CaptureRequest request,
+ CaptureFailure result) {
+ Log.d(TAG, "captureStillPicture onCaptureFailed: " + id);
+ mImageProcessHandler.obtainMessage(MSG_NEW_RESULT,
+ id, 1, result).sendToTarget();
+ }
+
+ @Override
+ public void onCaptureSequenceCompleted(CameraCaptureSession session, int
+ sequenceId, long frameNumber) {
+ Log.d(TAG, "captureStillPicture onCaptureSequenceCompleted: " + id);
+ }
+ };
+ } else {
+ // No Clearsight
+ captureBuilder = mCameraDevice[id].createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
+
+ // Orientation
+ int rotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
+ captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
+ captureCallback = new CameraCaptureSession.CaptureCallback() {
+
+ @Override
+ public void onCaptureCompleted(CameraCaptureSession session,
+ CaptureRequest request,
+ TotalCaptureResult result) {
+ Log.d(TAG, "captureStillPicture onCaptureCompleted: " + id);
+ }
+
+ @Override
+ public void onCaptureFailed(CameraCaptureSession session,
+ CaptureRequest request,
+ CaptureFailure result) {
+ Log.d(TAG, "captureStillPicture onCaptureFailed: " + id);
+ }
+
+ @Override
+ public void onCaptureSequenceCompleted(CameraCaptureSession session, int
+ sequenceId, long frameNumber) {
+ Log.d(TAG, "captureStillPicture onCaptureSequenceCompleted: " + id);
+ unlockFocus(id);
+ }
+ };
+ }
captureBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
captureBuilder.addTarget(getPreviewSurface(id));
captureBuilder.addTarget(mImageReader[id].getSurface());
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_AUTO);
- captureBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest
- .CONTROL_AF_TRIGGER_IDLE);
+ captureBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
applyCaptureSettings(captureBuilder, id);
- // Orientation
- int rotation = mActivity.getWindowManager().getDefaultDisplay().getRotation();
- captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
-
- CameraCaptureSession.CaptureCallback CaptureCallback
- = new CameraCaptureSession.CaptureCallback() {
-
- @Override
- public void onCaptureCompleted(CameraCaptureSession session,
- CaptureRequest request,
- TotalCaptureResult result) {
- Log.d(TAG, "captureStillPicture onCaptureCompleted");
- }
+ mCaptureSession[id].stopRepeating();
- @Override
- public void onCaptureSequenceCompleted(CameraCaptureSession session, int
- sequenceId, long frameNumber) {
- Log.d(TAG, "captureStillPicture onCaptureSequenceCompleted");
- unlockFocus(id);
+ if(csEnabled) {
+ List<CaptureRequest> burstList = new ArrayList<CaptureRequest>();
+ for (int i = 0; i < NUM_IMAGES_TO_BURST; i++) {
+ burstList.add(captureBuilder.build());
}
- };
- mCaptureSession[id].stopRepeating();
- mCaptureSession[id].capture(captureBuilder.build(), CaptureCallback, mCallbackHandler);
+ mImageProcessHandler.obtainMessage(MSG_START_CAPTURE, id, burstList.size()).sendToTarget();
+ mCaptureSession[id].captureBurst(burstList, captureCallback, mCallbackHandler);
+ } else {
+ mCaptureSession[id].capture(captureBuilder.build(), captureCallback, mCallbackHandler);
+ }
} catch (CameraAccessException e) {
Log.d(TAG, "Capture still picture has failed");
e.printStackTrace();
@@ -766,7 +1312,7 @@ public class CaptureModule implements CameraModule, PhotoController,
for (int i = 0; i < cameraIdList.length; i++) {
String cameraId = cameraIdList[i];
CameraCharacteristics characteristics
- = manager.getCameraCharacteristics(cameraId);
+ = manager.getCameraCharacteristics(cameraId);
mCharacteristics[i] = characteristics;
mCharacteristicsIndex.add(i);
StreamConfigurationMap map = characteristics.get(
@@ -776,16 +1322,54 @@ public class CaptureModule implements CameraModule, PhotoController,
}
Boolean available = characteristics.get(CameraCharacteristics.FLASH_INFO_AVAILABLE);
Log.d(TAG, "flash : " + (available == null ? false : available));
- // For still image captures, we use the largest available size.
- Size largest = Collections.max(
- Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
- new CompareSizesByArea());
-
- mImageReader[i] = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
- ImageFormat.JPEG, 3);
- mImageReader[i].setOnImageAvailableListener(
- mOnImageAvailableListener, mImageAvailableHandler);
mCameraId[i] = cameraId;
+
+ ListPreference pref = mPreferenceGroup.findPreference(CameraSettings.KEY_CLEARSIGHT);
+ if(pref.getValue().equals(mActivity.getString(R.string.pref_camera_clearsight_value_on))) {
+ // For still image captures, we use the largest available size.
+ Size largest = Collections.max(
+ Arrays.asList(map.getOutputSizes(ImageFormat.YUV_420_888)),
+ new CompareSizesByArea());
+
+ mImageReader[i] = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
+ ImageFormat.YUV_420_888, NUM_IMAGES_TO_BURST);
+ mImageReader[i].setOnImageAvailableListener(new ImageAvailableListener(i) {
+ @Override
+ public void onImageAvailable(ImageReader reader) {
+ Log.d(TAG, "image available for cam: " + mCamId);
+ mImageProcessHandler.obtainMessage(
+ MSG_NEW_IMG, mCamId, 0, reader.acquireNextImage()).sendToTarget();
+ }
+ }, mImageAvailableHandler);
+
+ mReprocessImageReader[i] = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
+ ImageFormat.YUV_420_888, NUM_IMAGES_TO_BURST);
+ mReprocessImageReader[i].setOnImageAvailableListener(new ImageAvailableListener(i) {
+ @Override
+ public void onImageAvailable(ImageReader reader) {
+ Log.d(TAG, "reprocessed image available for cam: " + mCamId);
+ mImageReprocessHandler.obtainMessage(
+ MSG_NEW_IMG, mCamId, 0, reader.acquireNextImage()).sendToTarget();
+ }
+ }, mImageAvailableHandler);
+ } else {
+ // No Clearsight
+ // For still image captures, we use the largest available size.
+ Size largest = Collections.max(
+ Arrays.asList(map.getOutputSizes(ImageFormat.JPEG)),
+ new CompareSizesByArea());
+
+ mImageReader[i] = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
+ ImageFormat.JPEG, 3);
+ mImageReader[i].setOnImageAvailableListener(new ImageAvailableListener(i) {
+ @Override
+ public void onImageAvailable(ImageReader reader) {
+ Log.d(TAG, "image available for cam: " + mCamId);
+ mImageProcessHandler.obtainMessage(
+ MSG_SAVE, mCamId, 0, reader.acquireNextImage()).sendToTarget();
+ }
+ }, mImageAvailableHandler);
+ }
}
mAutoFocusSupported = CameraUtil.isAutoFocusSupported(mCharacteristics,
mCharacteristicsIndex);
@@ -851,6 +1435,14 @@ public class CaptureModule implements CameraModule, PhotoController,
mImageReader[i].close();
mImageReader[i] = null;
}
+ if (null != mReprocessImageReader[i]) {
+ mReprocessImageReader[i].close();
+ mReprocessImageReader[i] = null;
+ }
+ if (null != mImageWriter[i]) {
+ mImageWriter[i].close();
+ mImageWriter[i] = null;
+ }
}
for (int i = 0; i < MAX_NUM_CAM; i++) {
if (null != mCameraDevice[i]) {
@@ -883,10 +1475,16 @@ public class CaptureModule implements CameraModule, PhotoController,
mImageAvailableThread.start();
mCallbackThread = new HandlerThread("CameraCallback");
mCallbackThread.start();
+ mImageProcessThread = new HandlerThread("CameraImageProcess");
+ mImageProcessThread.start();
+ mImageReprocessThread = new HandlerThread("CameraImageReprocess");
+ mImageReprocessThread.start();
mCameraHandler = new MyCameraHandler(mCameraThread.getLooper());
mImageAvailableHandler = new Handler(mImageAvailableThread.getLooper());
mCallbackHandler = new Handler(mCallbackThread.getLooper());
+ mImageProcessHandler = new ImageProcessHandler(mImageProcessThread.getLooper());
+ mImageReprocessHandler = new ImageReprocessHandler(mImageReprocessThread.getLooper());
}
/**
@@ -896,6 +1494,8 @@ public class CaptureModule implements CameraModule, PhotoController,
mCameraThread.quitSafely();
mImageAvailableThread.quitSafely();
mCallbackThread.quitSafely();
+ mImageProcessThread.quitSafely();
+ mImageReprocessThread.quitSafely();
try {
mCameraThread.join();
mCameraThread = null;
@@ -917,6 +1517,20 @@ public class CaptureModule implements CameraModule, PhotoController,
} catch (InterruptedException e) {
e.printStackTrace();
}
+ try {
+ mImageProcessThread.join();
+ mImageProcessThread = null;
+ mImageProcessHandler = null;
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ try {
+ mImageReprocessThread.join();
+ mImageReprocessThread = null;
+ mImageReprocessHandler = null;
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
}
private void openCamera(int id) {
@@ -955,8 +1569,8 @@ public class CaptureModule implements CameraModule, PhotoController,
Log.d(TAG, "onPause");
mUI.showPreviewCover();
if (mLocationManager != null) mLocationManager.recordLocation(false);
- stopBackgroundThread();
closeCamera();
+ stopBackgroundThread();
mUI.onPause();
}
@@ -969,9 +1583,9 @@ public class CaptureModule implements CameraModule, PhotoController,
public void onResumeAfterSuper() {
Log.d(TAG, "onResume " + MODE);
mCharacteristicsIndex = new ArrayList<>();
+ startBackgroundThread();
setUpCameraOutputs();
readInitialValues();
- startBackgroundThread();
Message msg = Message.obtain();
msg.what = OPEN_CAMERA;
switch (MODE) {
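
[Note on the change above] The ImageProcessHandler added to CaptureModule pairs bayer and mono frames by comparing Image timestamps against TIMESTAMP_THRESHOLD_NS and tosses the oldest pair when they drift more than 10 ms apart. A minimal, self-contained sketch of that pairing rule, using plain timestamps in place of ReprocessableImage (hypothetical FramePairerSketch class and sample values; not part of the patch):

    import java.util.ArrayDeque;

    // Sketch of the timestamp-pairing rule used by ImageProcessHandler.
    // The queues hold only timestamps here; the real code stores ReprocessableImage.
    public class FramePairerSketch {
        private static final long TIMESTAMP_THRESHOLD_NS = 10 * 1000000; // 10 ms, as in the patch

        private final ArrayDeque<Long> bayer = new ArrayDeque<>();
        private final ArrayDeque<Long> mono = new ArrayDeque<>();

        // Called whenever a frame arrives on either queue.
        void onFrame(boolean isBayer, long timestampNs) {
            (isBayer ? bayer : mono).add(timestampNs);
            checkForValidFramePair();
        }

        // Mirrors checkForValidFramePair(): once both queues are non-empty,
        // compare the oldest entries and drop the pair if they are too far apart;
        // matching pairs stay queued until the whole burst has arrived.
        private void checkForValidFramePair() {
            if (!bayer.isEmpty() && !mono.isEmpty()) {
                long delta = Math.abs(bayer.peek() - mono.peek());
                if (delta > TIMESTAMP_THRESHOLD_NS) {
                    System.out.println("toss pair, delta=" + delta + " ns");
                    bayer.poll();
                    mono.poll();
                } else {
                    System.out.println("keep pair, delta=" + delta + " ns");
                }
            }
        }

        public static void main(String[] args) {
            FramePairerSketch pairer = new FramePairerSketch();
            pairer.onFrame(true, 100_000_000L);   // bayer at 100 ms
            pairer.onFrame(false, 104_000_000L);  // mono at 104 ms -> kept (4 ms apart)
            pairer.onFrame(true, 200_000_000L);   // bayer at 200 ms
            pairer.onFrame(false, 230_000_000L);  // mono at 230 ms -> tossed (30 ms apart)
        }
    }
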
diff --git a/src/com/android/camera/MediaSaveService.java b/src/com/android/camera/MediaSaveService.java
index 3e764a2c6..d496310c8 100644
--- a/src/com/android/camera/MediaSaveService.java
+++ b/src/com/android/camera/MediaSaveService.java
@@ -16,11 +16,18 @@
package com.android.camera;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.nio.ByteOrder;
+
import android.app.Service;
import android.content.ContentResolver;
import android.content.ContentValues;
import android.content.Intent;
import android.graphics.BitmapFactory;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.hardware.camera2.TotalCaptureResult;
import android.location.Location;
import android.net.Uri;
import android.os.AsyncTask;
@@ -32,6 +39,10 @@ import com.android.camera.PhotoModule;
import com.android.camera.exif.ExifInterface;
import java.io.File;
+import com.android.camera.mpo.MpoData;
+import com.android.camera.mpo.MpoImageData;
+import com.android.camera.mpo.MpoInterface;
+import com.android.camera.util.ClearSightNativeEngine.ClearsightImage;
/*
* Service for saving images in the background thread.
@@ -85,6 +96,31 @@ public class MediaSaveService extends Service {
return (mMemoryUse >= SAVE_TASK_MEMORY_LIMIT);
}
+ public void addMpoImage(final ClearsightImage csImage,
+ final YuvImage bayerImg, final YuvImage monoImg,
+ TotalCaptureResult bayerResult, TotalCaptureResult monoResult,
+ String title, long date, Location loc, int orientation,
+ OnMediaSavedListener l, ContentResolver resolver,
+ String pictureFormat) {
+ if (isQueueFull()) {
+ Log.e(TAG, "Cannot add image when the queue is full");
+ return;
+ }
+
+ MpoSaveTask t = new MpoSaveTask(csImage, bayerImg, monoImg,
+ bayerResult, monoResult, title, date, loc, orientation, l,
+ resolver, pictureFormat);
+
+ long size = (csImage == null ? 0
+ : csImage.getDataLength())
+ + bayerImg.getYuvData().length + monoImg.getYuvData().length;
+ mMemoryUse += size;
+ if (isQueueFull()) {
+ onQueueFull();
+ }
+ t.execute();
+ }
+
public void addImage(final byte[] data, String title, long date, Location loc,
int width, int height, int orientation, ExifInterface exif,
OnMediaSavedListener l, ContentResolver resolver, String pictureFormat) {
@@ -139,6 +175,97 @@ public class MediaSaveService extends Service {
if (mListener != null) mListener.onQueueStatus(false);
}
+ private class MpoSaveTask extends AsyncTask<Void, Void, Uri> {
+ private ClearsightImage csImage;
+ private YuvImage bayerImage;
+ private YuvImage monoImage;
+ private String title;
+ private long date;
+ private Location loc;
+ private int width, height;
+ private int orientation;
+ private TotalCaptureResult bayerResult;
+ private TotalCaptureResult monoResult;
+ private ContentResolver resolver;
+ private OnMediaSavedListener listener;
+ private String pictureFormat;
+
+ public MpoSaveTask(ClearsightImage csImage, YuvImage bayerImg,
+ YuvImage monoImg, TotalCaptureResult bayerResult,
+ TotalCaptureResult monoResult, String title, long date,
+ Location loc, int orientation, OnMediaSavedListener listener,
+ ContentResolver resolver, String pictureFormat) {
+ this.csImage = csImage;
+ this.bayerImage = bayerImg;
+ this.monoImage = monoImg;
+ this.title = title;
+ this.date = date;
+ this.loc = loc;
+ this.width = bayerImg.getWidth();
+ this.height = bayerImg.getHeight();
+ this.orientation = orientation;
+ this.bayerResult = bayerResult;
+ this.monoResult = monoResult;
+ this.resolver = resolver;
+ this.listener = listener;
+ this.pictureFormat = pictureFormat;
+ }
+
+ @Override
+ protected Uri doInBackground(Void... v) {
+ // encode jpeg and add exif for all images
+ MpoData mpo = new MpoData();
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ bayerImage.compressToJpeg(new Rect(0, 0, bayerImage.getWidth(),
+ bayerImage.getHeight()), 100, baos);
+ MpoImageData bayer = new MpoImageData(baos.toByteArray(),
+ ByteOrder.BIG_ENDIAN);
+
+ baos.reset();
+ monoImage.compressToJpeg(new Rect(0, 0, monoImage.getWidth(),
+ monoImage.getHeight()), 100, baos);
+ MpoImageData mono = new MpoImageData(baos.toByteArray(),
+ ByteOrder.BIG_ENDIAN);
+
+ if (csImage == null) {
+ mpo.addAuxiliaryMpoImage(mono);
+ mpo.setPrimaryMpoImage(bayer);
+ } else {
+ MpoImageData cs = new MpoImageData(csImage.compressToJpeg(),
+ ByteOrder.BIG_ENDIAN);
+
+ mpo.addAuxiliaryMpoImage(bayer);
+ mpo.addAuxiliaryMpoImage(mono);
+ mpo.setPrimaryMpoImage(cs);
+ }
+
+ // combine to single mpo
+ String path = Storage.generateFilepath(title, pictureFormat);
+ int size = MpoInterface.writeMpo(mpo, path);
+ // Try to get the real image size after add exif.
+ File f = new File(path);
+ if (f.exists() && f.isFile()) {
+ size = (int) f.length();
+ }
+ return Storage.addImage(resolver, title, date, loc, orientation,
+ size, path, width, height, pictureFormat);
+ }
+
+ @Override
+ protected void onPostExecute(Uri uri) {
+ if (listener != null)
+ listener.onMediaSaved(uri);
+ boolean previouslyFull = isQueueFull();
+ long size = (csImage == null ? 0
+ : csImage.getDataLength())
+ + bayerImage.getYuvData().length
+ + monoImage.getYuvData().length;
+ mMemoryUse -= size;
+ if (isQueueFull() != previouslyFull)
+ onQueueAvailable();
+ }
+ }
+
private class ImageSaveTask extends AsyncTask <Void, Void, Uri> {
private byte[] data;
private String title;
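
[Note on the change above] MpoSaveTask encodes each YuvImage to JPEG in memory before handing the frames to MpoInterface.writeMpo(). A minimal sketch of that encode step using android.graphics.YuvImage (hypothetical helper class and a tiny zero-filled frame, assumed packed NV21 layout; not part of the patch):

    import java.io.ByteArrayOutputStream;

    import android.graphics.ImageFormat;
    import android.graphics.Rect;
    import android.graphics.YuvImage;

    // Sketch of the YUV -> JPEG step performed by MpoSaveTask (hypothetical helper).
    final class YuvJpegSketch {
        // Compress the full frame at quality 100, as the patch does.
        static byte[] toJpeg(YuvImage yuv) {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            yuv.compressToJpeg(new Rect(0, 0, yuv.getWidth(), yuv.getHeight()), 100, baos);
            return baos.toByteArray();
        }

        // Example input: a tiny 4x4 NV21 frame (Y plane followed by interleaved VU).
        static YuvImage tinyNv21Frame() {
            int w = 4, h = 4;
            byte[] nv21 = new byte[w * h + w * h / 2]; // zero-filled, i.e. a dark frame
            return new YuvImage(nv21, ImageFormat.NV21, w, h, null);
        }
    }
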
diff --git a/src/com/android/camera/util/ClearSightNativeEngine.java b/src/com/android/camera/util/ClearSightNativeEngine.java
new file mode 100644
index 000000000..cf0a740d7
--- /dev/null
+++ b/src/com/android/camera/util/ClearSightNativeEngine.java
@@ -0,0 +1,351 @@
+/*
+ * Copyright (c) 2016, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package com.android.camera.util;
+
+import java.io.ByteArrayOutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.media.Image;
+import android.media.Image.Plane;
+import android.util.Log;
+
+public class ClearSightNativeEngine {
+ private static final String TAG = "ClearSightNativeEngine";
+ static {
+ try {
+ System.loadLibrary("jni_clearsight");
+ mLibLoaded = true;
+ Log.v(TAG, "successfully loaded clearsight lib");
+ } catch (UnsatisfiedLinkError e) {
+ Log.e(TAG, "failed to load clearsight lib");
+ e.printStackTrace();
+ mLibLoaded = false;
+ }
+ }
+
+ private static final int METADATA_SIZE = 5;
+ private static final int Y_PLANE = 0;
+ private static final int VU_PLANE = 2;
+
+ // dummy OTP calib data
+ private static final String otp_calib = "Calibration OTP format version = 10301\n"
+ + "Main Native Sensor Resolution width = 4224px\n"
+ + "Main Native Sensor Resolution height = 3136px\n"
+ + "Main Calibration Resolution width = 1280px\n"
+ + "Main Calibration Resolution height = 950px\n"
+ + "Main Focal length ratio = 1.004896\n"
+ + "Aux Native Sensor Resolution width = 1600px\n"
+ + "Aux Native Sensor Resolution height = 1200px\n"
+ + "Aux Calibration Resolution width = 1280px\n"
+ + "Aux Calibration Resolution height = 960px\n"
+ + "Aux Focal length ratio = 1.000000\n"
+ + "Relative Rotation matrix [0] through [8] = 1.000000,-0.003008,0.000251,0.003073,1.000189,0.003329,0.019673,-0.003329,1.000284\n"
+ + "Relative Geometric surface parameters [0] through [31] = -0.307164,-0.879074,4.636152,0.297486,-0.157539,-6.889396,0.109467,-2.797022,-0.066306,-0.120142,0.196464,0.021974,2.905827,0.241197,0.048328,-5.116615,0.496533,-5.263813,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000,0.000000\n"
+ + "Relative Principal point X axis offset (ox) = 0.000000px\n"
+ + "Relative Principal point Y axis offset (oy) = 0.000000px\n"
+ + "Relative position flag = 1\n"
+ + "Baseline distance = 20.000000mm\n"
+ + "Main sensor mirror and flip setting = 3\n"
+ + "Aux sensor mirror and flip setting = 3\n"
+ + "Module orientation during calibration = 0\n"
+ + "Rotation flag = 0\n"
+ + "Main Normalized Focal length = 1000.0px\n"
+ + "Aux Normalized Focal length = 1000.0px";
+
+ private static boolean mLibLoaded;
+ private static ClearSightNativeEngine mInstance;
+
+ private Image mRefColorImage;
+ private Image mRefMonoImage;
+ private ArrayList<SourceImage> mSrcColor = new ArrayList<SourceImage>();
+ private ArrayList<SourceImage> mSrcMono = new ArrayList<SourceImage>();
+
+ private ClearSightNativeEngine() {
+ }
+
+ public static void createInstance() {
+ if (mInstance == null) {
+ mInstance = new ClearSightNativeEngine();
+ }
+ }
+
+ public static ClearSightNativeEngine getInstance() {
+ createInstance();
+ return mInstance;
+ }
+
+ public boolean isLibLoaded() {
+ return mLibLoaded;
+ }
+
+ public void reset() {
+ mSrcColor.clear();
+ mSrcMono.clear();
+ setReferenceColorImage(null);
+ setReferenceMonoImage(null);
+ }
+
+ public void setReferenceImage(boolean color, Image image) {
+ if (color)
+ setReferenceColorImage(image);
+ else
+ setReferenceMonoImage(image);
+ }
+
+ private void setReferenceColorImage(Image reference) {
+ if (mRefColorImage != null) {
+ mRefColorImage.close();
+ mRefColorImage = null;
+ }
+
+ mRefColorImage = reference;
+
+ if (mRefColorImage != null) {
+ Log.e(TAG,
+ "setRefColorImage - isdirectbuff: "
+ + mRefColorImage.getPlanes()[0].getBuffer()
+ .isDirect());
+ mSrcColor.add(new SourceImage(mRefColorImage.getPlanes()[Y_PLANE]
+ .getBuffer(), mRefColorImage.getPlanes()[VU_PLANE]
+ .getBuffer(), new int[] { 0, 0, 0, 0, 0 }));
+ }
+ }
+
+ private void setReferenceMonoImage(Image reference) {
+ if (mRefMonoImage != null) {
+ mRefMonoImage.close();
+ mRefMonoImage = null;
+ }
+
+ mRefMonoImage = reference;
+
+ if (mRefMonoImage != null) {
+ Log.e(TAG,
+ "setRefMonoImage - isdirectbuff: "
+ + mRefMonoImage.getPlanes()[0].getBuffer()
+ .isDirect());
+ mSrcMono.add(new SourceImage(mRefMonoImage.getPlanes()[Y_PLANE]
+ .getBuffer(), null, new int[] { 0, 0, 0, 0, 0 }));
+ }
+ }
+
+ public boolean hasReferenceImage(boolean color) {
+ return !(color ? mSrcColor.isEmpty() : mSrcMono.isEmpty());
+ }
+
+ public Image getReferenceImage(boolean color) {
+ return color ? mRefColorImage : mRefMonoImage;
+ }
+
+ public boolean registerImage(boolean color, Image image) {
+ return (color ? registerColorImage(image) : registerMonoImage(image));
+ }
+
+ private boolean registerColorImage(Image image) {
+ if (mSrcColor.isEmpty()) {
+ Log.w(TAG, "reference color image not yet set");
+ return false;
+ }
+
+ Plane[] planes = image.getPlanes();
+ ByteBuffer refY = mRefColorImage.getPlanes()[Y_PLANE].getBuffer();
+ ByteBuffer refVU = mRefColorImage.getPlanes()[VU_PLANE].getBuffer();
+ ByteBuffer regY = ByteBuffer.allocateDirect(refY.capacity());
+ ByteBuffer regVU = ByteBuffer.allocateDirect(refVU.capacity());
+ int[] metadata = new int[METADATA_SIZE];
+
+ boolean result = clearSightRegisterImage(refY,
+ planes[Y_PLANE].getBuffer(), planes[VU_PLANE].getBuffer(),
+ image.getWidth(), image.getHeight(),
+ planes[Y_PLANE].getRowStride(),
+ planes[VU_PLANE].getRowStride(), regY, regVU, metadata);
+
+ if (result) {
+ mSrcColor.add(new SourceImage(regY, regVU, metadata));
+ }
+
+ image.close();
+ return result;
+ }
+
+ private boolean registerMonoImage(Image image) {
+ if (mSrcMono.isEmpty()) {
+ Log.w(TAG, "reference mono image not yet set");
+ return false;
+ }
+
+ Plane[] planes = image.getPlanes();
+ ByteBuffer refY = mRefMonoImage.getPlanes()[Y_PLANE].getBuffer();
+ ByteBuffer regY = ByteBuffer.allocateDirect(refY.capacity());
+ int[] metadata = new int[METADATA_SIZE];
+
+ boolean result = clearSightRegisterImage(refY,
+ planes[Y_PLANE].getBuffer(), null, image.getWidth(),
+ image.getHeight(), planes[Y_PLANE].getRowStride(), 0, regY,
+ null, metadata);
+
+ if (result) {
+ mSrcMono.add(new SourceImage(regY, null, metadata));
+ }
+
+ image.close();
+ return result;
+ }
+
+ public ClearsightImage processImage() {
+ // check data validity
+ if (mSrcColor.size() != mSrcMono.size()) {
+ // mis-match in num images
+ Log.e(TAG, "processImage - numImages mismatch - bayer: "
+ + mSrcColor.size() + ", mono: " + mSrcMono.size());
+ return null;
+ }
+
+ int numImages = mSrcColor.size();
+ ByteBuffer[] srcColorY = new ByteBuffer[numImages];
+ ByteBuffer[] srcColorVU = new ByteBuffer[numImages];
+ int[][] metadataColor = new int[numImages][];
+ ByteBuffer[] srcMonoY = new ByteBuffer[numImages];
+ int[][] metadataMono = new int[numImages][];
+
+ Log.e(TAG, "processImage - numImages: " + numImages);
+
+ for (int i = 0; i < numImages; i++) {
+ SourceImage color = mSrcColor.get(i);
+ SourceImage mono = mSrcMono.get(i);
+
+ srcColorY[i] = color.mY;
+ srcColorVU[i] = color.mVU;
+ metadataColor[i] = color.mMetadata;
+
+ srcMonoY[i] = mono.mY;
+ metadataMono[i] = mono.mMetadata;
+ }
+
+ Plane[] colorPlanes = mRefColorImage.getPlanes();
+ Plane[] monoPlanes = mRefMonoImage.getPlanes();
+ ByteBuffer dstY = ByteBuffer.allocateDirect(colorPlanes[Y_PLANE]
+ .getBuffer().capacity());
+ ByteBuffer dstVU = ByteBuffer.allocateDirect(colorPlanes[VU_PLANE]
+ .getBuffer().capacity());
+ int[] roiRect = new int[4];
+
+ boolean result = clearSightProcess(numImages, srcColorY, srcColorVU,
+ metadataColor, mRefColorImage.getWidth(),
+ mRefColorImage.getHeight(),
+ colorPlanes[Y_PLANE].getRowStride(),
+ colorPlanes[VU_PLANE].getRowStride(), srcMonoY, metadataMono,
+ mRefMonoImage.getWidth(), mRefMonoImage.getHeight(),
+ monoPlanes[Y_PLANE].getRowStride(), otp_calib.getBytes(), dstY, dstVU,
+ colorPlanes[Y_PLANE].getRowStride(),
+ colorPlanes[VU_PLANE].getRowStride(), roiRect);
+
+ if (result) {
+ dstY.rewind();
+ dstVU.rewind();
+ byte[] data = new byte[dstY.capacity() + dstVU.capacity()];
+ int[] strides = new int[] { colorPlanes[Y_PLANE].getRowStride(),
+ colorPlanes[VU_PLANE].getRowStride() };
+ dstY.get(data, 0, dstY.capacity());
+ dstVU.get(data, dstY.capacity(), dstVU.capacity());
+ return new ClearsightImage(new YuvImage(data, ImageFormat.NV21,
+ mRefColorImage.getWidth(), mRefColorImage.getHeight(),
+ strides), roiRect);
+ } else {
+ return null;
+ }
+ }
+
+ native public boolean configureClearSight(float focalLengthRatio,
+ float brIntensity, float sharpenIntensity);
+
+ native public boolean clearSightRegisterImage(ByteBuffer refY,
+ ByteBuffer srcY, ByteBuffer srcVU, int width, int height,
+ int strideY, int strideVU, ByteBuffer dstY, ByteBuffer dstVU,
+ int[] metadata);
+
+ native public boolean clearSightProcess(int numImagePairs,
+ ByteBuffer[] srcColorY, ByteBuffer[] srcColorVU,
+ int[][] metadataColor, int srcColorWidth, int srcColorHeight,
+ int srcColorStrideY, int srcColorStrideVU, ByteBuffer[] srcMonoY,
+ int[][] metadataMono, int srcMonoWidth, int srcMonoHeight,
+ int srcMonoStrideY, byte[] otp, ByteBuffer dstY, ByteBuffer dstVU,
+ int dstStrideY, int dstStrideVU, int[] roiRect);
+
+ private class SourceImage {
+ ByteBuffer mY;
+ ByteBuffer mVU;
+ int[] mMetadata;
+
+ SourceImage(ByteBuffer y, ByteBuffer vu, int[] metadata) {
+ mY = y;
+ mVU = vu;
+ mMetadata = metadata;
+ }
+ }
+
+ public static class ClearsightImage {
+ private YuvImage mImage;
+ private Rect mRoiRect;
+
+ ClearsightImage(YuvImage image, int[] rect) {
+ mImage = image;
+ mRoiRect = new Rect(rect[0], rect[1], rect[0] + rect[2], rect[1]
+ + rect[3]);
+ }
+
+ public Rect getRoiRect() {
+ return mRoiRect;
+ }
+
+ public long getDataLength() {
+ return (mImage==null?0:mImage.getYuvData().length);
+ }
+
+ public int getWidth() {
+ return (mRoiRect.right - mRoiRect.left);
+ }
+
+ public int getHeight() {
+ return (mRoiRect.bottom - mRoiRect.top);
+ }
+
+ public byte[] compressToJpeg() {
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+
+ mImage.compressToJpeg(mRoiRect, 100, baos);
+ return baos.toByteArray();
+ }
+ }
+}
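
[Note on the new file above] The fused frame returned by processImage(), like the YuvImage built in CaptureModule.createYuvImage(), is packed NV21: the Y plane followed by the interleaved VU plane. A self-contained sketch of that packing with plain ByteBuffers standing in for Image planes (hypothetical class and sizes; not part of the patch):

    import java.nio.ByteBuffer;

    // Sketch of the NV21 packing done in createYuvImage()/processImage():
    // copy the Y plane, then append the interleaved VU plane.
    final class Nv21PackSketch {
        static byte[] packNv21(ByteBuffer yPlane, ByteBuffer vuPlane) {
            byte[] data = new byte[yPlane.capacity() + vuPlane.capacity()];
            yPlane.rewind();
            yPlane.get(data, 0, yPlane.capacity());
            vuPlane.rewind();
            vuPlane.get(data, yPlane.capacity(), vuPlane.capacity());
            return data;
        }

        public static void main(String[] args) {
            int w = 4, h = 4;
            ByteBuffer y = ByteBuffer.allocateDirect(w * h);      // luma
            ByteBuffer vu = ByteBuffer.allocateDirect(w * h / 2); // interleaved chroma
            byte[] nv21 = packNv21(y, vu);
            System.out.println("packed " + nv21.length + " bytes"); // 24 for a 4x4 frame
        }
    }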