summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--assets/dependency.json34
-rw-r--r--res/layout/capture_module.xml7
-rw-r--r--res/values/camera2arrays.xml12
-rw-r--r--res/values/qcomstrings.xml1
-rw-r--r--res/values/strings.xml1
-rw-r--r--res/xml/capture_preferences.xml6
-rw-r--r--src/com/android/camera/CameraActivity.java15
-rw-r--r--src/com/android/camera/CaptureModule.java357
-rw-r--r--src/com/android/camera/CaptureUI.java222
-rw-r--r--src/com/android/camera/PhotoMenu.java11
-rw-r--r--src/com/android/camera/RefocusActivity.java56
-rw-r--r--src/com/android/camera/SettingsManager.java62
-rwxr-xr-xsrc/com/android/camera/VideoModule.java2
-rw-r--r--src/com/android/camera/WideAnglePanoramaUI.java2
-rw-r--r--src/com/android/camera/imageprocessor/FrameProcessor.java228
-rw-r--r--src/com/android/camera/imageprocessor/PostProcessor.java38
-rw-r--r--src/com/android/camera/imageprocessor/filter/BeautificationFilter.java22
-rw-r--r--src/com/android/camera/imageprocessor/filter/ImageFilter.java14
-rw-r--r--src/com/android/camera/imageprocessor/filter/OptizoomFilter.java17
-rw-r--r--src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java18
-rw-r--r--src/com/android/camera/imageprocessor/filter/TrackingFocusFrameListener.java293
-rw-r--r--src/com/android/camera/imageprocessor/filter/UbifocusFilter.java269
-rw-r--r--src/com/android/camera/ui/RenderOverlay.java8
-rw-r--r--src/com/android/camera/ui/TrackingFocusRenderer.java285
24 files changed, 1689 insertions, 291 deletions
diff --git a/assets/dependency.json b/assets/dependency.json
index 6fe735b7a..763549320 100644
--- a/assets/dependency.json
+++ b/assets/dependency.json
@@ -1,7 +1,34 @@
{
"pref_camera2_scenemode_key":
{
- "_comment":"Copyright (c) 2016, The Linux Foundation. All rights reserved.\nNot a Contribution.\nCopyright (C) 2013 The Android Open Source Project\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\nhttp://www.apache.org/licenses/LICENSE-2.0\nUnless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and\nlimitations under the License.",
+ "_license":[
+ "Copyright (c) 2016, The Linux Foundation. All rights reserved.",
+ "",
+ "Redistribution and use in source and binary forms, with or without",
+ "modification, are permitted provided that the following conditions are",
+ "met:",
+ "* Redistributions of source code must retain the above copyright",
+ "notice, this list of conditions and the following disclaimer.",
+ "* Redistributions in binary form must reproduce the above",
+ "copyright notice, this list of conditions and the following",
+ "disclaimer in the documentation and/or other materials provided",
+ "with the distribution.",
+ "* Neither the name of The Linux Foundation nor the names of its",
+ "contributors may be used to endorse or promote products derived",
+ "from this software without specific prior written permission.",
+ "",
+ "THIS SOFTWARE IS PROVIDED \"AS IS\" AND ANY EXPRESS OR IMPLIED",
+ "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF",
+ "MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT",
+ "ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS",
+ "BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR",
+ "CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF",
+ "SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR",
+ "BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,",
+ "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE",
+ "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN",
+ "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
+ ],
"default":
{"pref_camera2_coloreffect_key":"0",
"pref_camera2_flashmode_key":"2",
@@ -12,10 +39,11 @@
,
"100":
{"pref_camera2_longshot_key":"off",
- "pref_camera2_mono_only_key":"off"}
+ "pref_camera2_mono_only_key":"off",
+ "pref_camera2_flashmode_key":"1"}
,
"0":
{"pref_camera2_clearsight_key":"off",
"pref_camera2_mono_preview_key":"off"}
}
-} \ No newline at end of file
+}
diff --git a/res/layout/capture_module.xml b/res/layout/capture_module.xml
index 07594cef3..dd7d3cb1b 100644
--- a/res/layout/capture_module.xml
+++ b/res/layout/capture_module.xml
@@ -32,17 +32,16 @@
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_gravity="center_vertical|center_horizontal">
-
<com.android.camera.ui.AutoFitSurfaceView
android:id="@+id/mdp_preview_content"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:layout_gravity="center" />
-
<com.android.camera.ui.AutoFitSurfaceView
- android:id="@+id/mdp_preview_content2"
android:layout_width="300dp"
- android:layout_height="300dp" />
+ android:layout_height="300dp"
+ android:id="@+id/mdp_preview_content_mono"
+ android:visibility="gone"/>
</FrameLayout>
<View
diff --git a/res/values/camera2arrays.xml b/res/values/camera2arrays.xml
index 089838405..25105ba65 100644
--- a/res/values/camera2arrays.xml
+++ b/res/values/camera2arrays.xml
@@ -70,6 +70,16 @@
<item>off</item>
</string-array>
+ <string-array name="pref_camera2_trackingfocus_entries" translatable="true">
+ <item>On</item>
+ <item>Off</item>
+ </string-array>
+
+ <string-array name="pref_camera2_trackingfocus_entryvalues" translatable="false">
+ <item>on</item>
+ <item>off</item>
+ </string-array>
+
<string-array name="pref_camera2_mono_preview_entries" translatable="true">
<item>@string/pref_camera2_mono_preview_entry_on</item>
<item>@string/pref_camera2_mono_preview_entry_off</item>
@@ -99,7 +109,7 @@
<item>0</item>
<item>100</item>
<item>18</item>
- <item>-1</item>
+ <item>102</item>
<item>101</item>
<item>3</item>
<item>4</item>
diff --git a/res/values/qcomstrings.xml b/res/values/qcomstrings.xml
index 6d1ec7862..e8917f3ee 100644
--- a/res/values/qcomstrings.xml
+++ b/res/values/qcomstrings.xml
@@ -1003,5 +1003,6 @@
<string name="pref_camera2_videosnap_entry_enable" translatable="true">Enable</string>
<string name="pref_camera2_videosnap_entry_disable" translatable="true">Disable</string>
+ <string name="pref_camera2_trackingfocus_title" translatable="true">Tracking Focus</string>
</resources>
diff --git a/res/values/strings.xml b/res/values/strings.xml
index 898329a7f..775aeb60d 100644
--- a/res/values/strings.xml
+++ b/res/values/strings.xml
@@ -748,4 +748,5 @@ CHAR LIMIT = NONE] -->
<string name="remaining_photos_format">%d left</string>
<string name="initial_recording_seconds">00:00</string>
+ <string name="makeup_video_size_limit">Makeup works only under VGA size in video recording.</string>
</resources>
diff --git a/res/xml/capture_preferences.xml b/res/xml/capture_preferences.xml
index d7d478d0a..c5aab1a7d 100644
--- a/res/xml/capture_preferences.xml
+++ b/res/xml/capture_preferences.xml
@@ -273,4 +273,10 @@
camera:entryValues="@array/pref_camera2_videosnap_entryvalues"
camera:key="pref_camera2_videosnap_key"
camera:title="@string/pref_camera2_videosnap_title" />
+ <ListPreference
+ camera:defaultValue="off"
+ camera:key="pref_camera2_trackingfocus_key"
+ camera:entries="@array/pref_camera2_trackingfocus_entries"
+ camera:entryValues="@array/pref_camera2_trackingfocus_entryvalues"
+ camera:title="@string/pref_camera2_trackingfocus_title"/>
</PreferenceGroup>
diff --git a/src/com/android/camera/CameraActivity.java b/src/com/android/camera/CameraActivity.java
index e3b4573d1..68d9c7c3a 100644
--- a/src/com/android/camera/CameraActivity.java
+++ b/src/com/android/camera/CameraActivity.java
@@ -249,6 +249,7 @@ public class CameraActivity extends Activity
private Cursor mCursor;
private WakeLock mWakeLock;
+ private static final int REFOCUS_ACTIVITY_CODE = 1;
private class MyOrientationEventListener
extends OrientationEventListener {
@@ -576,6 +577,16 @@ public class CameraActivity extends Activity
return;
}
}
+ if (mCurrentModule instanceof CaptureModule) {
+ if (((CaptureModule) mCurrentModule).isRefocus()) {
+ Intent intent = new Intent();
+ intent.setClass(this, RefocusActivity.class);
+ intent.setData(uri);
+ intent.setFlags(RefocusActivity.MAP_ROTATED);
+ startActivityForResult(intent, REFOCUS_ACTIVITY_CODE);
+ return;
+ }
+ }
try {
Intent intent = IntentHelper.getGalleryIntent(this);
intent.setAction(Intent.ACTION_VIEW);
@@ -1624,6 +1635,10 @@ public class CameraActivity extends Activity
if (requestCode == REQ_CODE_DONT_SWITCH_TO_PREVIEW) {
mResetToPreviewOnResume = false;
mIsEditActivityInProgress = false;
+ } else if (requestCode == REFOCUS_ACTIVITY_CODE) {
+ if(resultCode == RESULT_OK) {
+ mCaptureModule.setRefocusLastTaken(false);
+ }
} else {
super.onActivityResult(requestCode, resultCode, data);
}
diff --git a/src/com/android/camera/CaptureModule.java b/src/com/android/camera/CaptureModule.java
index a7448aedb..c541dd4d0 100644
--- a/src/com/android/camera/CaptureModule.java
+++ b/src/com/android/camera/CaptureModule.java
@@ -68,6 +68,7 @@ import android.view.KeyEvent;
import android.view.OrientationEventListener;
import android.view.Surface;
import android.view.SurfaceHolder;
+import android.view.SurfaceView;
import android.view.View;
import android.widget.Toast;
@@ -80,6 +81,7 @@ import com.android.camera.imageprocessor.filter.SharpshooterFilter;
import com.android.camera.ui.CountDownView;
import com.android.camera.ui.ModuleSwitcher;
import com.android.camera.ui.RotateTextToast;
+import com.android.camera.ui.TrackingFocusRenderer;
import com.android.camera.util.CameraUtil;
import com.android.camera.util.PersistUtil;
import com.android.camera.util.SettingTranslation;
@@ -179,7 +181,7 @@ public class CaptureModule implements CameraModule, PhotoController,
// The degrees of the device rotated clockwise from its natural orientation.
private int mOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
private boolean mFirstTimeInitialized;
- private boolean mInitialized = false;
+ private boolean mCamerasOpened = false;
private boolean mIsLinked = false;
private long mCaptureStartTime;
private boolean mPaused = true;
@@ -199,6 +201,7 @@ public class CaptureModule implements CameraModule, PhotoController,
private CameraCharacteristics mMainCameraCharacteristics;
private int mDisplayRotation;
private int mDisplayOrientation;
+ private boolean mIsRefocus = false;
/**
* A {@link CameraCaptureSession } for camera preview.
@@ -216,7 +219,6 @@ public class CaptureModule implements CameraModule, PhotoController,
*/
private PostProcessor mPostProcessor;
private FrameProcessor mFrameProcessor;
- private Size mFrameProcPreviewOutputSize;
private CaptureResult mPreviewCaptureResult;
private Face[] mPreviewFaces = null;
private Face[] mStickyFaces = null;
@@ -240,7 +242,6 @@ public class CaptureModule implements CameraModule, PhotoController,
private CameraCaptureSession mCurrentSession;
private Size mPreviewSize;
private Size mPictureSize;
- private Size mVideoPreviewSize;
private Size mVideoSize;
private Size mVideoSnapshotSize;
@@ -398,13 +399,14 @@ public class CaptureModule implements CameraModule, PhotoController,
});
mFirstPreviewLoaded = true;
}
-
- Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
- mPreviewFaces = faces;
- if(faces != null && faces.length != 0) {
- mStickyFaces = faces;
+ if (id == getMainCameraId()) {
+ Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
+ mPreviewFaces = faces;
+ if (faces != null && faces.length != 0) {
+ mStickyFaces = faces;
+ }
+ mPreviewCaptureResult = result;
}
- mPreviewCaptureResult = result;
updateCaptureStateMachine(id, result);
}
@@ -441,23 +443,23 @@ public class CaptureModule implements CameraModule, PhotoController,
if (mPaused) {
return;
}
+
+ mCameraDevice[id] = cameraDevice;
+ mCameraOpened[id] = true;
+
if (isBackCamera() && getCameraMode() == DUAL_MODE && id == BAYER_ID) {
Message msg = mCameraHandler.obtainMessage(OPEN_CAMERA, MONO_ID);
mCameraHandler.sendMessage(msg);
- }
- if (!mInitialized) {
- mInitialized = true;
+ } else {
+ mCamerasOpened = true;
mActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
mUI.onCameraOpened(mCameraIdList);
}
});
+ createSessions();
}
-
- mCameraDevice[id] = cameraDevice;
- mCameraOpened[id] = true;
- createSession(id);
}
@Override
@@ -465,8 +467,9 @@ public class CaptureModule implements CameraModule, PhotoController,
int id = Integer.parseInt(cameraDevice.getId());
Log.d(TAG, "onDisconnected " + id);
cameraDevice.close();
- mCameraDevice = null;
+ mCameraDevice[id] = null;
mCameraOpenCloseLock.release();
+ mCamerasOpened = false;
}
@Override
@@ -476,6 +479,7 @@ public class CaptureModule implements CameraModule, PhotoController,
cameraDevice.close();
mCameraDevice[id] = null;
mCameraOpenCloseLock.release();
+ mCamerasOpened = false;
if (null != mActivity) {
mActivity.finish();
}
@@ -487,6 +491,7 @@ public class CaptureModule implements CameraModule, PhotoController,
Log.d(TAG, "onClosed " + id);
mCameraDevice[id] = null;
mCameraOpenCloseLock.release();
+ mCamerasOpened = false;
}
};
@@ -661,6 +666,26 @@ public class CaptureModule implements CameraModule, PhotoController,
}
}
+ private void createSessions() {
+ if (mPaused || !mCamerasOpened || !mSurfaceReady) return;
+ if (isBackCamera()) {
+ switch (getCameraMode()) {
+ case DUAL_MODE:
+ createSession(BAYER_ID);
+ createSession(MONO_ID);
+ break;
+ case BAYER_MODE:
+ createSession(BAYER_ID);
+ break;
+ case MONO_MODE:
+ createSession(MONO_ID);
+ break;
+ }
+ } else {
+ createSession(FRONT_ID);
+ }
+ }
+
private void createSession(final int id) {
if (mPaused || !mCameraOpened[id] || !mSurfaceReady) return;
Log.d(TAG, "createSession " + id);
@@ -682,7 +707,9 @@ public class CaptureModule implements CameraModule, PhotoController,
}
// When the session is ready, we start displaying the preview.
mCaptureSession[id] = cameraCaptureSession;
- mCurrentSession = cameraCaptureSession;
+ if(id == getMainCameraId()) {
+ mCurrentSession = cameraCaptureSession;
+ }
initializePreviewConfiguration(id);
setDisplayOrientation();
updateFaceDetection();
@@ -704,7 +731,7 @@ public class CaptureModule implements CameraModule, PhotoController,
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
- Log.e(TAG, "cameracapturesession - onConfigureFailed");
+ Log.e(TAG, "cameracapturesession - onConfigureFailed "+id);
new AlertDialog.Builder(mActivity)
.setTitle("Camera Initialization Failed")
.setMessage("Closing SnapdragonCamera")
@@ -725,6 +752,11 @@ public class CaptureModule implements CameraModule, PhotoController,
}
};
+ if(id == getMainCameraId()) {
+ mFrameProcessor.init(mPreviewSize);
+ mFrameProcessor.setOutputSurface(surface);
+ }
+
if(isClearSightOn()) {
mPreviewRequestBuilder[id].addTarget(surface);
list.add(surface);
@@ -732,16 +764,17 @@ public class CaptureModule implements CameraModule, PhotoController,
id == BAYER_ID, mCameraDevice[id], list, captureSessionCallback);
} else if (id == getMainCameraId()) {
if(mFrameProcessor.isFrameFilterEnabled()) {
- mFrameProcessor.init(mFrameProcPreviewOutputSize);
mActivity.runOnUiThread(new Runnable() {
public void run() {
- mUI.getSurfaceHolder().setFixedSize(mFrameProcPreviewOutputSize.getHeight(), mFrameProcPreviewOutputSize.getWidth());
+ mUI.getSurfaceHolder().setFixedSize(mPreviewSize.getHeight(), mPreviewSize.getWidth());
}
});
}
- mFrameProcessor.setOutputSurface(surface);
- mPreviewRequestBuilder[id].addTarget(mFrameProcessor.getInputSurface());
- list.add(mFrameProcessor.getInputSurface());
+ List<Surface> surfaces = mFrameProcessor.getInputSurfaces();
+ for(Surface surs : surfaces) {
+ mPreviewRequestBuilder[id].addTarget(surs);
+ list.add(surs);
+ }
list.add(mImageReader[id].getSurface());
mCameraDevice[id].createCaptureSession(list, captureSessionCallback, null);
} else {
@@ -755,11 +788,23 @@ public class CaptureModule implements CameraModule, PhotoController,
}
}
- private void setAFModeToPreview(int id, int afMode) {
+ public void setAFModeToPreview(int id, int afMode) {
Log.d(TAG, "setAFModeToPreview " + afMode);
mPreviewRequestBuilder[id].set(CaptureRequest.CONTROL_AF_MODE, afMode);
applyAFRegions(mPreviewRequestBuilder[id], id);
applyAERegions(mPreviewRequestBuilder[id], id);
+ mPreviewRequestBuilder[id].setTag(id);
+ try {
+ mCaptureSession[id].setRepeatingRequest(mPreviewRequestBuilder[id]
+ .build(), mCaptureCallback, mCameraHandler);
+ } catch (CameraAccessException e) {
+ e.printStackTrace();
+ }
+ }
+
+ public void setFocusDistanceToPreview(int id, float fd) {
+ mPreviewRequestBuilder[id].set(CaptureRequest.LENS_FOCUS_DISTANCE, fd);
+ mPreviewRequestBuilder[id].setTag(id);
try {
mCaptureSession[id].setRepeatingRequest(mPreviewRequestBuilder[id]
.build(), mCaptureCallback, mCameraHandler);
@@ -773,6 +818,10 @@ public class CaptureModule implements CameraModule, PhotoController,
mSettingsManager.reinit(getMainCameraId());
}
+ public boolean isRefocus() {
+ return mIsRefocus;
+ }
+
public boolean getRecordLocation() {
String value = mSettingsManager.getValue(SettingsManager.KEY_RECORD_LOCATION);
if (value == null) value = RecordLocationPreference.VALUE_NONE;
@@ -856,7 +905,7 @@ public class CaptureModule implements CameraModule, PhotoController,
CaptureRequest.Builder builder = mCameraDevice[id].createCaptureRequest(CameraDevice
.TEMPLATE_PREVIEW);
builder.setTag(id);
- builder.addTarget(getPreviewSurface(id));
+ addPreviewSurface(builder, null, id);
applySettingsForLockFocus(builder, id);
CaptureRequest request = builder.build();
@@ -878,7 +927,7 @@ public class CaptureModule implements CameraModule, PhotoController,
CaptureRequest.Builder builder = mCameraDevice[id].createCaptureRequest(CameraDevice
.TEMPLATE_PREVIEW);
builder.setTag(id);
- builder.addTarget(getPreviewSurface(id));
+ addPreviewSurface(builder, null, id);
mControlAFMode = CaptureRequest.CONTROL_AF_MODE_AUTO;
applySettingsForAutoFocus(builder, id);
@@ -913,9 +962,32 @@ public class CaptureModule implements CameraModule, PhotoController,
mPreviewRequestBuilder[id].set(BayerMonoLinkEnableKey, (byte) 0);
}
}
-
private void captureStillPicture(final int id) {
Log.d(TAG, "captureStillPicture " + id);
+ mIsRefocus = false;
+ CameraCaptureSession.CaptureCallback captureCallback = new CameraCaptureSession.CaptureCallback() {
+
+ @Override
+ public void onCaptureCompleted(CameraCaptureSession session,
+ CaptureRequest request,
+ TotalCaptureResult result) {
+ Log.d(TAG, "captureStillPicture onCaptureCompleted: " + id);
+ }
+
+ @Override
+ public void onCaptureFailed(CameraCaptureSession session,
+ CaptureRequest request,
+ CaptureFailure result) {
+ Log.d(TAG, "captureStillPicture onCaptureFailed: " + id);
+ }
+
+ @Override
+ public void onCaptureSequenceCompleted(CameraCaptureSession session, int
+ sequenceId, long frameNumber) {
+ Log.d(TAG, "captureStillPicture onCaptureSequenceCompleted: " + id);
+ unlockFocus(id);
+ }
+ };
try {
if (null == mActivity || null == mCameraDevice[id]) {
warningToast("Camera is not ready yet to take a picture.");
@@ -934,7 +1006,7 @@ public class CaptureModule implements CameraModule, PhotoController,
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, CameraUtil.getJpegRotation(id, mOrientation));
captureBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
- captureBuilder.addTarget(getPreviewSurface(id));
+ addPreviewSurface(captureBuilder, null, id);
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, mControlAFMode);
captureBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE);
applySettingsForCapture(captureBuilder, id);
@@ -943,31 +1015,14 @@ public class CaptureModule implements CameraModule, PhotoController,
ClearSightImageProcessor.getInstance().capture(
id==BAYER_ID, mCaptureSession[id], captureBuilder, mCaptureCallbackHandler);
} else if(id == getMainCameraId() && mPostProcessor.isFilterOn()) {
+ mCaptureSession[id].stopRepeating();
captureBuilder.addTarget(mImageReader[id].getSurface());
- List<CaptureRequest> captureList = mPostProcessor.setRequiredImages(captureBuilder);
- mCaptureSession[id].captureBurst(captureList, new CameraCaptureSession.CaptureCallback() {
-
- @Override
- public void onCaptureCompleted(CameraCaptureSession session,
- CaptureRequest request,
- TotalCaptureResult result) {
- Log.d(TAG, "captureStillPicture onCaptureCompleted: " + id);
- }
-
- @Override
- public void onCaptureFailed(CameraCaptureSession session,
- CaptureRequest request,
- CaptureFailure result) {
- Log.d(TAG, "captureStillPicture onCaptureFailed: " + id);
- }
-
- @Override
- public void onCaptureSequenceCompleted(CameraCaptureSession session, int
- sequenceId, long frameNumber) {
- Log.d(TAG, "captureStillPicture onCaptureSequenceCompleted: " + id);
- unlockFocus(id);
- }
- }, mCaptureCallbackHandler);
+ if(mPostProcessor.isManualMode()) {
+ mPostProcessor.manualCapture(captureBuilder, mCaptureSession[id], captureCallback, mCaptureCallbackHandler);
+ } else {
+ List<CaptureRequest> captureList = mPostProcessor.setRequiredImages(captureBuilder);
+ mCaptureSession[id].captureBurst(captureList, captureCallback, mCaptureCallbackHandler);
+ }
} else {
captureBuilder.addTarget(mImageReader[id].getSurface());
mCaptureSession[id].stopRepeating();
@@ -1106,7 +1161,7 @@ public class CaptureModule implements CameraModule, PhotoController,
CaptureRequest.Builder builder = mCameraDevice[id].createCaptureRequest(CameraDevice
.TEMPLATE_PREVIEW);
builder.setTag(id);
- builder.addTarget(getPreviewSurface(id));
+ addPreviewSurface(builder, null, id);
applySettingsForPrecapture(builder, id);
CaptureRequest request = builder.build();
mPrecaptureRequestHashCode[id] = request.hashCode();
@@ -1117,36 +1172,6 @@ public class CaptureModule implements CameraModule, PhotoController,
}
}
- private void determineFrameProcPreviewOutputSize(List<Size> sizeList, float targetRatio) {
- Display display = mActivity.getWindowManager().getDefaultDisplay();
- Point ds = new Point();
- display.getSize(ds);
- int i=0, j=0, width, height;
- float ratio;
- for(; i < sizeList.size(); i++) {
- width = sizeList.get(i).getHeight();
- height = sizeList.get(i).getWidth();
- ratio = (float)height/width;
- if(ds.x >= width || ds.y >= height) {
- if(j == 0) {
- j = i;
- }
- if(ratio < targetRatio + 0.2f && ratio > targetRatio - 0.2f) {
- break;
- }
- }
- }
- if(i == sizeList.size()) {
- if(j != 0) {
- mFrameProcPreviewOutputSize = sizeList.get(j);
- } else {
- mFrameProcPreviewOutputSize = sizeList.get(sizeList.size()-1);
- }
- } else {
- mFrameProcPreviewOutputSize = sizeList.get(i);
- }
- }
-
public CameraCharacteristics getMainCameraCharacteristics() {
return mMainCameraCharacteristics;
}
@@ -1180,19 +1205,6 @@ public class CaptureModule implements CameraModule, PhotoController,
}
mCameraId[i] = cameraId;
- if (i == getMainCameraId()) {
- String pictureSize = mSettingsManager.getValue(SettingsManager
- .KEY_PICTURE_SIZE);
-
- Size size = parsePictureSize(pictureSize);
-
- Point screenSize = new Point();
- mActivity.getWindowManager().getDefaultDisplay().getSize(screenSize);
- Size[] prevSizes = map.getOutputSizes(imageFormat);
- mFrameProcPreviewOutputSize = getOptimalPreviewSize(size, prevSizes, screenSize.x,
- screenSize.y);
- }
-
if (isClearSightOn()) {
if(i == getMainCameraId()) {
ClearSightImageProcessor.getInstance().init(mPictureSize.getWidth(),
@@ -1203,12 +1215,9 @@ public class CaptureModule implements CameraModule, PhotoController,
// No Clearsight
mImageReader[i] = ImageReader.newInstance(mPictureSize.getWidth(),
mPictureSize.getHeight(), imageFormat, MAX_IMAGE_NUM);
- if(mPostProcessor.isFilterOn() && i == getMainCameraId()) {
+ if((mPostProcessor.isFilterOn() || getFrameFilters().size() != 0)
+ && i == getMainCameraId()) {
mImageReader[i].setOnImageAvailableListener(mPostProcessor, mImageAvailableHandler);
-// if(mFrameProcessor.isFrameFilterEnabled()) {
-// determineFrameProcPreviewOutputSize(Arrays.asList(map.getOutputSizes(imageFormat)),
-// (float) size.getWidth() / (float) size.getHeight());
-// }
} else {
mImageReader[i].setOnImageAvailableListener(new ImageAvailableListener(i) {
@Override
@@ -1283,7 +1292,7 @@ public class CaptureModule implements CameraModule, PhotoController,
CaptureRequest.Builder builder = mCameraDevice[id].createCaptureRequest(CameraDevice
.TEMPLATE_PREVIEW);
builder.setTag(id);
- builder.addTarget(getPreviewSurface(id));
+ addPreviewSurface(builder, null, id);
applySettingsForUnlockFocus(builder, id);
mCaptureSession[id].capture(builder.build(), mCaptureCallback, mCameraHandler);
@@ -1326,7 +1335,7 @@ public class CaptureModule implements CameraModule, PhotoController,
for (int i = 0; i < MAX_NUM_CAM; i++) {
if (null != mCaptureSession[i]) {
- if (mIsLinked) {
+ if (mIsLinked && mCamerasOpened) {
unLinkBayerMono(i);
try {
mCaptureSession[i].capture(mPreviewRequestBuilder[i].build(), null,
@@ -1548,20 +1557,36 @@ public class CaptureModule implements CameraModule, PhotoController,
}
private ArrayList<Integer> getFrameProcFilterId() {
- String scene = mSettingsManager.getValue(SettingsManager.KEY_MAKEUP);
ArrayList<Integer> filters = new ArrayList<Integer>();
+
+ String scene = mSettingsManager.getValue(SettingsManager.KEY_MAKEUP);
if(scene != null && scene.equalsIgnoreCase("on")) {
filters.add(FrameProcessor.FILTER_MAKEUP);
}
+ String trackingFocus = mSettingsManager.getValue(SettingsManager.KEY_TRACKINGFOCUS);
+ if(trackingFocus != null && trackingFocus.equalsIgnoreCase("on")) {
+ filters.add(FrameProcessor.LISTENER_TRACKING_FOCUS);
+ }
return filters;
}
+ public void setRefocusLastTaken(final boolean value) {
+ mIsRefocus = value;
+ mActivity.runOnUiThread(new Runnable() {
+ public void run() {
+ mUI.showRefocusToast(value);
+ }
+ });
+ }
+
private int getPostProcFilterId(int mode) {
if (mode == SettingsManager.SCENE_MODE_OPTIZOOM_INT) {
return PostProcessor.FILTER_OPTIZOOM;
} else if (mode == SettingsManager.SCENE_MODE_NIGHT_INT && SharpshooterFilter.isSupportedStatic()) {
return PostProcessor.FILTER_SHARPSHOOTER;
+ } else if (mode == SettingsManager.SCENE_MODE_UBIFOCUS_INT) {
+ return PostProcessor.FILTER_UBIFOCUS;
}
return PostProcessor.FILTER_NONE;
}
@@ -1597,7 +1622,7 @@ public class CaptureModule implements CameraModule, PhotoController,
mFrameProcessor.onOpen(getFrameProcFilterId());
}
- if(mPostProcessor.isFilterOn()) {
+ if(mPostProcessor.isFilterOn() || getFrameFilters().size() != 0) {
setUpCameraOutputs(ImageFormat.YUV_420_888);
} else {
setUpCameraOutputs(ImageFormat.JPEG);
@@ -1759,7 +1784,7 @@ public class CaptureModule implements CameraModule, PhotoController,
@Override
public void onSingleTapUp(View view, int x, int y) {
- if (mPaused || mCameraDevice == null || !mFirstTimeInitialized || !mAutoFocusRegionSupported
+ if (mPaused || !mCamerasOpened || !mFirstTimeInitialized || !mAutoFocusRegionSupported
|| !mAutoExposureRegionSupported || !isTouchToFocusAllowed()) {
return;
}
@@ -1870,22 +1895,7 @@ public class CaptureModule implements CameraModule, PhotoController,
}
Log.d(TAG, "onPreviewUIReady");
mSurfaceReady = true;
- if (isBackCamera()) {
- switch (getCameraMode()) {
- case DUAL_MODE:
- createSession(BAYER_ID);
- createSession(MONO_ID);
- break;
- case BAYER_MODE:
- createSession(BAYER_ID);
- break;
- case MONO_MODE:
- createSession(MONO_ID);
- break;
- }
- } else {
- createSession(FRONT_ID);
- }
+ createSessions();
}
@Override
@@ -1999,11 +2009,6 @@ public class CaptureModule implements CameraModule, PhotoController,
private void updateVideoSize() {
String videoSize = mSettingsManager.getValue(SettingsManager.KEY_VIDEO_QUALITY);
mVideoSize = parsePictureSize(videoSize);
- Point screenSize = new Point();
- mActivity.getWindowManager().getDefaultDisplay().getSize(screenSize);
- Size[] prevSizes = mSettingsManager.getSupportedOutputSize(getMainCameraId(),
- MediaRecorder.class);
- mVideoPreviewSize = getOptimalPreviewSize(mVideoSize, prevSizes, screenSize.x, screenSize.y);
}
private void updateVideoSnapshotSize() {
@@ -2041,8 +2046,8 @@ public class CaptureModule implements CameraModule, PhotoController,
mState[cameraId] = STATE_PREVIEW;
mControlAFMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
closePreviewSession();
- boolean changed = mUI.setPreviewSize(mVideoPreviewSize.getWidth(), mVideoPreviewSize
- .getHeight());
+ mFrameProcessor.onClose();
+ boolean changed = mUI.setPreviewSize(mVideoSize.getWidth(), mVideoSize.getHeight());
if (changed) {
mUI.hideSurfaceView();
mUI.showSurfaceView();
@@ -2055,11 +2060,18 @@ public class CaptureModule implements CameraModule, PhotoController,
.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
List<Surface> surfaces = new ArrayList<>();
- Surface previewSurface = getPreviewSurface(cameraId);
- surfaces.add(previewSurface);
- mPreviewBuilder.addTarget(previewSurface);
- surfaces.add(mMediaRecorder.getSurface());
- mPreviewBuilder.addTarget(mMediaRecorder.getSurface());
+ Surface surface = getPreviewSurfaceForSession(cameraId);
+ mFrameProcessor.init(mVideoSize);
+ if(mFrameProcessor.isFrameFilterEnabled()) {
+ mActivity.runOnUiThread(new Runnable() {
+ public void run() {
+ mUI.getSurfaceHolder().setFixedSize(mVideoSize.getHeight(), mVideoSize.getWidth());
+ }
+ });
+ }
+ mFrameProcessor.setOutputSurface(surface);
+ mFrameProcessor.setVideoOutputSurface(mMediaRecorder.getSurface());
+ addPreviewSurface(mPreviewBuilder, surfaces, cameraId);
surfaces.add(mVideoSnapshotImageReader.getSurface());
mCameraDevice[cameraId].createCaptureSession(surfaces, new CameraCaptureSession
@@ -2235,12 +2247,13 @@ public class CaptureModule implements CameraModule, PhotoController,
Log.d(TAG, "stopRecordingVideo " + cameraId);
// Stop recording
+ mFrameProcessor.onClose();
+ mFrameProcessor.setVideoOutputSurface(null);
closePreviewSession();
mMediaRecorder.stop();
mMediaRecorder.reset();
saveVideo();
-
mUI.showRecordingUI(false);
mIsRecordingVideo = false;
boolean changed = mUI.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
@@ -2348,9 +2361,13 @@ public class CaptureModule implements CameraModule, PhotoController,
String fileName = generateVideoFilename(outputFormat);
Log.v(TAG, "New video filename: " + fileName);
mMediaRecorder.setOutputFile(fileName);
- mMediaRecorder.setVideoEncodingBitRate(mProfile.videoBitRate);
mMediaRecorder.setVideoFrameRate(mProfile.videoFrameRate);
- mMediaRecorder.setVideoSize(mProfile.videoFrameWidth, mProfile.videoFrameHeight);
+ mMediaRecorder.setVideoEncodingBitRate(mProfile.videoBitRate);
+ if(mFrameProcessor.isFrameFilterEnabled()) {
+ mMediaRecorder.setVideoSize(mProfile.videoFrameHeight, mProfile.videoFrameWidth);
+ } else {
+ mMediaRecorder.setVideoSize(mProfile.videoFrameWidth, mProfile.videoFrameHeight);
+ }
mMediaRecorder.setVideoEncoder(videoEncoder);
if (!mCaptureTimeLapse) {
mMediaRecorder.setAudioEncodingBitRate(mProfile.audioBitRate);
@@ -2374,7 +2391,11 @@ public class CaptureModule implements CameraModule, PhotoController,
rotation += Integer.parseInt(videoRotation);
rotation = rotation % 360;
}
- mMediaRecorder.setOrientationHint(rotation);
+ if(mFrameProcessor.isFrameFilterEnabled()) {
+ mMediaRecorder.setOrientationHint(0);
+ } else {
+ mMediaRecorder.setOrientationHint(rotation);
+ }
mMediaRecorder.prepare();
}
@@ -2409,7 +2430,8 @@ public class CaptureModule implements CameraModule, PhotoController,
if (seconds > 0) {
mUI.startCountDown(seconds, true);
} else {
- if (mPostProcessor.isFilterOn() && mPostProcessor.isItBusy()) {
+ if((mPostProcessor.isFilterOn() || getFrameFilters().size() != 0)
+ && mPostProcessor.isItBusy()) {
warningToast("It's still busy processing previous scene mode request.");
return;
}
@@ -2708,22 +2730,29 @@ public class CaptureModule implements CameraModule, PhotoController,
}
}
- private Surface getPreviewSurface(int id) {
- if (isBackCamera()) {
- if (getCameraMode() == DUAL_MODE && id == MONO_ID) {
- return mUI.getSurfaceHolder2().getSurface();
- } else {
- return mFrameProcessor.getInputSurface();
+ private void addPreviewSurface(CaptureRequest.Builder builder, List<Surface> surfaceList, int id) {
+ if (isBackCamera() && getCameraMode() == DUAL_MODE && id == MONO_ID) {
+ if(surfaceList != null) {
+ surfaceList.add(mUI.getMonoDummySurface());
}
+ builder.addTarget(mUI.getMonoDummySurface());
+ return;
} else {
- return mFrameProcessor.getInputSurface();
+ List<Surface> surfaces = mFrameProcessor.getInputSurfaces();
+ for(Surface surface : surfaces) {
+ if(surfaceList != null) {
+ surfaceList.add(surface);
+ }
+ builder.addTarget(surface);
+ }
+ return;
}
}
private Surface getPreviewSurfaceForSession(int id) {
if (isBackCamera()) {
if (getCameraMode() == DUAL_MODE && id == MONO_ID) {
- return mUI.getSurfaceHolder2().getSurface();
+ return mUI.getMonoDummySurface();
} else {
return mUI.getSurfaceHolder().getSurface();
}
@@ -2811,6 +2840,23 @@ public class CaptureModule implements CameraModule, PhotoController,
mDisplayOrientation = CameraUtil.getDisplayOrientation(mDisplayRotation, getMainCameraId());
}
+ private void checkVideoSizeDependency() {
+ String makeup = mSettingsManager.getValue(SettingsManager.KEY_MAKEUP);
+ if(makeup != null && makeup.equalsIgnoreCase("on")) {
+ if(mVideoSize.getWidth() > 640 || mVideoSize.getHeight() > 480) {
+ mActivity.runOnUiThread(new Runnable() {
+ public void run() {
+ RotateTextToast.makeText(mActivity, R.string.makeup_video_size_limit, Toast.LENGTH_SHORT).show();
+ }
+ });
+ mSettingsManager.setValue(mSettingsManager.KEY_VIDEO_QUALITY, "640x480");
+ }
+ mSettingsManager.updateVideoQualityMenu(getMainCameraId(), 640, 480);
+ } else {
+ mSettingsManager.updateVideoQualityMenu(getMainCameraId(), -1, -1);
+ }
+ }
+
@Override
public void onSettingsChanged(List<SettingsManager.SettingState> settings) {
if (mPaused) return;
@@ -2864,7 +2910,11 @@ public class CaptureModule implements CameraModule, PhotoController,
if (count == 0) restart();
return;
case SettingsManager.KEY_MAKEUP:
- restart();
+ if (count == 0) restart();
+ checkVideoSizeDependency();
+ return;
+ case SettingsManager.KEY_TRACKINGFOCUS:
+ if (count == 0) restart();
return;
case SettingsManager.KEY_SCENE_MODE:
if (count == 0 && checkNeedToRestart(value)) {
@@ -2938,8 +2988,9 @@ public class CaptureModule implements CameraModule, PhotoController,
private boolean checkNeedToRestart(String value) {
mPostProcessor.setFilter(PostProcessor.FILTER_NONE);
int mode = Integer.parseInt(value);
- if (getPostProcFilterId(mode) != PostProcessor.FILTER_NONE)
+ if (getPostProcFilterId(mode) != PostProcessor.FILTER_NONE) {
return true;
+ }
if (value.equals(SettingsManager.SCENE_MODE_DUAL_STRING) && mCurrentMode != DUAL_MODE)
return true;
if (!value.equals(SettingsManager.SCENE_MODE_DUAL_STRING) && mCurrentMode == DUAL_MODE)
@@ -2987,7 +3038,7 @@ public class CaptureModule implements CameraModule, PhotoController,
private Size getMaxSizeWithRatio(Size[] sizes, Size reference) {
float ratio = (float) reference.getWidth() / reference.getHeight();
- for (Size size: sizes) {
+ for (Size size : sizes) {
float prevRatio = (float) size.getWidth() / size.getHeight();
if (Math.abs(prevRatio - ratio) < 0.01) {
return size;
@@ -2996,6 +3047,10 @@ public class CaptureModule implements CameraModule, PhotoController,
return sizes[0];
}
+ public TrackingFocusRenderer getTrackingForcusRenderer() {
+ return mUI.getTrackingFocusRenderer();
+ }
+
/**
* Compares two {@code Size}s based on their areas.
*/
diff --git a/src/com/android/camera/CaptureUI.java b/src/com/android/camera/CaptureUI.java
index f5f23fa18..dd06ba400 100644
--- a/src/com/android/camera/CaptureUI.java
+++ b/src/com/android/camera/CaptureUI.java
@@ -22,16 +22,24 @@ package com.android.camera;
import android.animation.Animator;
import android.content.Context;
import android.content.res.Resources;
+import android.graphics.ImageFormat;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.drawable.AnimationDrawable;
import android.hardware.Camera.Face;
+import android.media.ImageReader;
+import android.renderscript.Allocation;
+import android.renderscript.Element;
+import android.renderscript.RenderScript;
+import android.renderscript.ScriptIntrinsicYuvToRGB;
+import android.renderscript.Type;
import android.text.TextUtils;
import android.util.Log;
import android.view.Display;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.MotionEvent;
+import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.View;
import android.view.ViewGroup;
@@ -42,6 +50,10 @@ import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
+import com.android.camera.imageprocessor.ScriptC_YuvToRgb;
+import com.android.camera.imageprocessor.ScriptC_rotator;
+import com.android.camera.imageprocessor.filter.BeautificationFilter;
+import com.android.camera.imageprocessor.filter.TrackingFocusFrameListener;
import com.android.camera.ui.AutoFitSurfaceView;
import com.android.camera.ui.Camera2FaceView;
import com.android.camera.ui.CameraControls;
@@ -55,13 +67,16 @@ import com.android.camera.ui.RenderOverlay;
import com.android.camera.ui.RotateImageView;
import com.android.camera.ui.RotateLayout;
import com.android.camera.ui.RotateTextToast;
+import com.android.camera.ui.TrackingFocusRenderer;
import com.android.camera.ui.ZoomRenderer;
import com.android.camera.util.CameraUtil;
import org.codeaurora.snapcam.R;
+import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
+import java.util.Set;
public class CaptureUI implements FocusOverlayManager.FocusUI,
PreviewGestures.SingleTapListener,
@@ -96,12 +111,13 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
SettingsManager.KEY_EXPOSURE,
SettingsManager.KEY_WHITE_BALANCE,
SettingsManager.KEY_CAMERA2,
- SettingsManager.KEY_MAKEUP,
SettingsManager.KEY_FACE_DETECTION,
SettingsManager.KEY_VIDEO_FLASH_MODE,
SettingsManager.KEY_VIDEO_DURATION,
- SettingsManager.KEY_VIDEO_QUALITY
- };
+ SettingsManager.KEY_VIDEO_QUALITY,
+ SettingsManager.KEY_TRACKINGFOCUS,
+ SettingsManager.KEY_MAKEUP
+ };
String[] mDeveloperKeys = new String[]{
SettingsManager.KEY_REDEYE_REDUCTION,
SettingsManager.KEY_MONO_ONLY,
@@ -120,9 +136,9 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
private View mPreviewCover;
private CaptureModule mModule;
private AutoFitSurfaceView mSurfaceView;
- private AutoFitSurfaceView mSurfaceView2;
+ private AutoFitSurfaceView mSurfaceViewMono;
private SurfaceHolder mSurfaceHolder;
- private SurfaceHolder mSurfaceHolder2;
+ private SurfaceHolder mSurfaceHolderMono;
private int mOrientation;
private RotateLayout mMenuLayout;
private RotateLayout mSubMenuLayout;
@@ -135,9 +151,27 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
private PreviewGestures mGestures;
private boolean mUIhidden = false;
private SettingsManager mSettingsManager;
-
+ private TrackingFocusRenderer mTrackingFocusRenderer;
private ImageView mThumbnail;
private Camera2FaceView mFaceView;
+ private Point mDisplaySize = new Point();
+
+ private SurfaceHolder.Callback callbackMono = new SurfaceHolder.Callback() {
+ // SurfaceHolder callbacks
+ @Override
+ public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
+ mSurfaceHolderMono = holder;
+ if(mMonoDummyOutputAllocation != null) {
+ mMonoDummyOutputAllocation.setSurface(mSurfaceHolderMono.getSurface());
+ }
+ }
+ @Override
+ public void surfaceCreated(SurfaceHolder holder) {
+ }
+ @Override
+ public void surfaceDestroyed(SurfaceHolder holder) {
+ }
+ };
private SurfaceHolder.Callback callback = new SurfaceHolder.Callback() {
@@ -152,6 +186,9 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
Log.v(TAG, "surfaceCreated");
mSurfaceHolder = holder;
previewUIReady();
+ if(mTrackingFocusRenderer != null && mTrackingFocusRenderer.isVisible()) {
+ mTrackingFocusRenderer.setSurfaceDim(mSurfaceView.getLeft(), mSurfaceView.getTop(), mSurfaceView.getRight(), mSurfaceView.getBottom());
+ }
}
@Override
@@ -171,6 +208,9 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
private CameraControls mCameraControls;
private PieRenderer mPieRenderer;
private ZoomRenderer mZoomRenderer;
+ private Allocation mMonoDummyAllocation;
+ private Allocation mMonoDummyOutputAllocation;
+ private boolean mIsMonoDummyAllocationEverUsed = false;
private int mScreenRatio = CameraUtil.RATIO_UNKNOWN;
private int mTopMargin = 0;
@@ -191,35 +231,8 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
int mPreviewWidth;
int mPreviewHeight;
- private SurfaceHolder.Callback callback2 = new SurfaceHolder.Callback() {
-
- // SurfaceHolder callbacks
- @Override
- public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
- Log.v(TAG, "surfaceChanged2");
- }
-
- @Override
- public void surfaceCreated(SurfaceHolder holder) {
- Log.v(TAG, "surfaceCreated2");
- mSurfaceHolder2 = holder;
- previewUIReady();
- }
-
- @Override
- public void surfaceDestroyed(SurfaceHolder holder) {
- Log.v(TAG, "surfaceDestroyed2");
- mSurfaceHolder2 = null;
- previewUIDestroyed();
- }
- };
-
private void previewUIReady() {
- if((mSurfaceHolder != null && mSurfaceHolder.getSurface().isValid()) &&
- (mSurfaceView2.getVisibility() != View.VISIBLE ||
- (mSurfaceView2.getVisibility() == View.VISIBLE &&
- mSurfaceHolder2 != null &&
- mSurfaceHolder2.getSurface().isValid()))) {
+ if((mSurfaceHolder != null && mSurfaceHolder.getSurface().isValid())) {
mModule.onPreviewUIReady();
mActivity.updateThumbnail(mThumbnail);
}
@@ -229,6 +242,14 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
mModule.onPreviewUIDestroyed();
}
+ public TrackingFocusRenderer getTrackingFocusRenderer() {
+ return mTrackingFocusRenderer;
+ }
+
+ public Point getDisplaySize() {
+ return mDisplaySize;
+ }
+
public CaptureUI(CameraActivity activity, CaptureModule module, View parent) {
mActivity = activity;
mModule = module;
@@ -240,8 +261,6 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
mPreviewCover = mRootView.findViewById(R.id.preview_cover);
// display the view
mSurfaceView = (AutoFitSurfaceView) mRootView.findViewById(R.id.mdp_preview_content);
- mSurfaceView2 = (AutoFitSurfaceView) mRootView.findViewById(R.id.mdp_preview_content2);
- mSurfaceView2.setZOrderMediaOverlay(true);
mSurfaceHolder = mSurfaceView.getHolder();
mSurfaceHolder.addCallback(callback);
mSurfaceView.addOnLayoutChangeListener(new View.OnLayoutChangeListener() {
@@ -256,8 +275,11 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
}
}
});
- mSurfaceHolder2 = mSurfaceView2.getHolder();
- mSurfaceHolder2.addCallback(callback2);
+
+ mSurfaceViewMono = (AutoFitSurfaceView) mRootView.findViewById(R.id.mdp_preview_content_mono);
+ mSurfaceViewMono.setZOrderMediaOverlay(true);
+ mSurfaceHolderMono = mSurfaceViewMono.getHolder();
+ mSurfaceHolderMono.addCallback(callbackMono);
mRenderOverlay = (RenderOverlay) mRootView.findViewById(R.id.render_overlay);
mShutterButton = (ShutterButton) mRootView.findViewById(R.id.shutter_button);
@@ -269,6 +291,15 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
initSceneModeButton();
initSwitchCamera();
+ mTrackingFocusRenderer = new TrackingFocusRenderer(mActivity, mModule, this);
+ mRenderOverlay.addRenderer(mTrackingFocusRenderer);
+ String trackingFocus = mSettingsManager.getValue(SettingsManager.KEY_TRACKINGFOCUS);
+ if(trackingFocus != null && trackingFocus.equalsIgnoreCase("on")) {
+ mTrackingFocusRenderer.setVisible(true);
+ } else {
+ mTrackingFocusRenderer.setVisible(false);
+ }
+
mSwitcher = (ModuleSwitcher) mRootView.findViewById(R.id.camera_switcher);
mSwitcher.setCurrentIndex(ModuleSwitcher.PHOTO_MODULE_INDEX);
mSwitcher.setSwitchListener(mActivity);
@@ -316,11 +347,10 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
mCameraControls = (CameraControls) mRootView.findViewById(R.id.camera_controls);
mFaceView = (Camera2FaceView) mRootView.findViewById(R.id.face_view);
- Point size = new Point();
- mActivity.getWindowManager().getDefaultDisplay().getSize(size);
- mScreenRatio = CameraUtil.determineRatio(size.x, size.y);
+ mActivity.getWindowManager().getDefaultDisplay().getSize(mDisplaySize);
+ mScreenRatio = CameraUtil.determineRatio(mDisplaySize.x, mDisplaySize.y);
if (mScreenRatio == CameraUtil.RATIO_16_9) {
- int l = size.x > size.y ? size.x : size.y;
+ int l = mDisplaySize.x > mDisplaySize.y ? mDisplaySize.x : mDisplaySize.y;
int tm = mActivity.getResources().getDimensionPixelSize(R.dimen.preview_top_margin);
int bm = mActivity.getResources().getDimensionPixelSize(R.dimen.preview_bottom_margin);
mTopMargin = l / 4 * tm / (tm + bm);
@@ -366,6 +396,17 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
initializeSettingMenu();
initSceneModeButton();
initFilterModeButton();
+ if (mTrackingFocusRenderer != null) {
+ mTrackingFocusRenderer.setVisible(true);
+ }
+ if (mSurfaceViewMono != null) {
+ if (mSettingsManager != null && mSettingsManager.getValue(SettingsManager.KEY_MONO_PREVIEW) != null
+ && mSettingsManager.getValue(SettingsManager.KEY_MONO_PREVIEW).equalsIgnoreCase("on")) {
+ mSurfaceViewMono.setVisibility(View.VISIBLE);
+ } else {
+ mSurfaceViewMono.setVisibility(View.GONE);
+ }
+ }
}
// called from onResume but only the first time
@@ -1008,7 +1049,6 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
String[] keys = mSettingKeys;
if (mActivity.isDeveloperMenuEnabled()) {
- keys = mDeveloperKeys;
String[] combined = new String[mSettingKeys.length + mDeveloperKeys.length];
int idx = 0;
for (String key: mSettingKeys) {
@@ -1275,8 +1315,59 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
return mSurfaceHolder;
}
- public SurfaceHolder getSurfaceHolder2() {
- return mSurfaceHolder2;
+ private class MonoDummyListener implements Allocation.OnBufferAvailableListener {
+ ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic;
+ public MonoDummyListener(ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic) {
+ this.yuvToRgbIntrinsic = yuvToRgbIntrinsic;
+ }
+
+ @Override
+ public void onBufferAvailable(Allocation a) {
+ if(mMonoDummyAllocation != null) {
+ mMonoDummyAllocation.ioReceive();
+ mIsMonoDummyAllocationEverUsed = true;
+ if(mSurfaceViewMono.getVisibility() == View.VISIBLE) {
+ try {
+ yuvToRgbIntrinsic.forEach(mMonoDummyOutputAllocation);
+ mMonoDummyOutputAllocation.ioSend();
+ } catch(Exception e)
+ {
+ Log.e(TAG, e.toString());
+ }
+ }
+ }
+ }
+ }
+
+ public Surface getMonoDummySurface() {
+ if (mMonoDummyAllocation == null) {
+ RenderScript rs = RenderScript.create(mActivity);
+ Type.Builder yuvTypeBuilder = new Type.Builder(rs, Element.YUV(rs));
+ yuvTypeBuilder.setX(mPreviewWidth);
+ yuvTypeBuilder.setY(mPreviewHeight);
+ yuvTypeBuilder.setYuvFormat(ImageFormat.YUV_420_888);
+ mMonoDummyAllocation = Allocation.createTyped(rs, yuvTypeBuilder.create(), Allocation.USAGE_IO_INPUT|Allocation.USAGE_SCRIPT);
+ ScriptIntrinsicYuvToRGB yuvToRgbIntrinsic = ScriptIntrinsicYuvToRGB.create(rs, Element.RGBA_8888(rs));
+ yuvToRgbIntrinsic.setInput(mMonoDummyAllocation);
+
+ if("on".equalsIgnoreCase(mSettingsManager.getValue(SettingsManager.KEY_MONO_PREVIEW))) {
+ Type.Builder rgbTypeBuilder = new Type.Builder(rs, Element.RGBA_8888(rs));
+ rgbTypeBuilder.setX(mPreviewWidth);
+ rgbTypeBuilder.setY(mPreviewHeight);
+ mMonoDummyOutputAllocation = Allocation.createTyped(rs, rgbTypeBuilder.create(), Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_OUTPUT);
+ mMonoDummyOutputAllocation.setSurface(mSurfaceHolderMono.getSurface());
+ mActivity.runOnUiThread(new Runnable() {
+ public void run() {
+ mSurfaceHolderMono.setFixedSize(mPreviewWidth, mPreviewHeight);
+ mSurfaceViewMono.setVisibility(View.VISIBLE);
+ }
+ });
+ }
+ mMonoDummyAllocation.setOnBufferAvailableListener(new MonoDummyListener(yuvToRgbIntrinsic));
+
+ mIsMonoDummyAllocationEverUsed = false;
+ }
+ return mMonoDummyAllocation.getSurface();
}
public void showPreviewCover() {
@@ -1318,7 +1409,20 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
public void onPause() {
cancelCountDown();
collapseCameraControls();
+
if (mFaceView != null) mFaceView.clear();
+ if(mTrackingFocusRenderer != null) {
+ mTrackingFocusRenderer.setVisible(false);
+ }
+ if (mMonoDummyAllocation != null && mIsMonoDummyAllocationEverUsed) {
+ mMonoDummyAllocation.setOnBufferAvailableListener(null);
+ mMonoDummyAllocation.destroy();
+ mMonoDummyAllocation = null;
+ }
+ if (mMonoDummyOutputAllocation != null && mIsMonoDummyAllocationEverUsed) {
+ mMonoDummyOutputAllocation.destroy();
+ mMonoDummyOutputAllocation = null;
+ }
}
public boolean collapseCameraControls() {
@@ -1330,7 +1434,19 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
return ret;
}
+ public void showRefocusToast(boolean show) {
+ mCameraControls.showRefocusToast(show);
+ }
+
private FocusIndicator getFocusIndicator() {
+ String trackingFocus = mSettingsManager.getValue(SettingsManager.KEY_TRACKINGFOCUS);
+ if (trackingFocus != null && trackingFocus.equalsIgnoreCase("on")) {
+ if (mPieRenderer != null) {
+ mPieRenderer.clear();
+ }
+ return mTrackingFocusRenderer;
+ }
+
return (mFaceView != null && mFaceView.faceExists()) ? mFaceView : mPieRenderer;
}
@@ -1411,12 +1527,6 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
return point;
}
- public Point getSurfaceView2Size() {
- Point point = new Point();
- if (mSurfaceView2 != null) point.set(mSurfaceView2.getWidth(), mSurfaceView2.getHeight());
- return point;
- }
-
public void adjustOrientation() {
setOrientation(mOrientation, true);
}
@@ -1575,22 +1685,12 @@ public class CaptureUI implements FocusOverlayManager.FocusUI,
public void hideSurfaceView() {
mSurfaceView.setVisibility(View.INVISIBLE);
- mSurfaceView2.setVisibility(View.INVISIBLE);
}
public void showSurfaceView() {
mSurfaceView.getHolder().setFixedSize(mPreviewWidth, mPreviewHeight);
mSurfaceView.setAspectRatio(mPreviewHeight, mPreviewWidth);
mSurfaceView.setVisibility(View.VISIBLE);
- mSurfaceView2.setVisibility(View.VISIBLE);
- }
-
- public void setSurfaceView(boolean show) {
- if (show) {
- mSurfaceView2.setVisibility(View.VISIBLE);
- } else {
- mSurfaceView2.setVisibility(View.INVISIBLE);
- }
}
public boolean setPreviewSize(int width, int height) {
diff --git a/src/com/android/camera/PhotoMenu.java b/src/com/android/camera/PhotoMenu.java
index 083b019c4..6a1f08597 100644
--- a/src/com/android/camera/PhotoMenu.java
+++ b/src/com/android/camera/PhotoMenu.java
@@ -820,10 +820,13 @@ public class PhotoMenu extends MenuController
.findPreference(prefKey);
if (pref == null)
return;
- // Hide the camera control while switching the camera.
- // The camera control will be added back when
- // onCameraPickerClicked is completed
- mUI.hideUI();
+
+ if (prefKey.equals(CameraSettings.KEY_CAMERA_ID)) {
+ // Hide the camera control while switching the camera.
+ // The camera control will be added back when
+ // onCameraPickerClicked is completed
+ mUI.hideUI();
+ }
int index = pref.findIndexOfValue(pref.getValue());
CharSequence[] values = pref.getEntryValues();
index = (index + 1) % values.length;
diff --git a/src/com/android/camera/RefocusActivity.java b/src/com/android/camera/RefocusActivity.java
index ae13b484b..599d46656 100644
--- a/src/com/android/camera/RefocusActivity.java
+++ b/src/com/android/camera/RefocusActivity.java
@@ -30,6 +30,7 @@ package com.android.camera;
import java.io.File;
import java.io.FileInputStream;
+import java.io.IOException;
import java.io.OutputStream;
import android.animation.Animator;
@@ -43,6 +44,7 @@ import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
+import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Point;
import android.net.Uri;
@@ -50,12 +52,15 @@ import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Build;
import android.util.AttributeSet;
+import android.util.Log;
import android.view.Display;
import android.view.MotionEvent;
import android.view.View;
import android.widget.ImageView;
import android.widget.FrameLayout;
+import com.android.camera.exif.ExifInterface;
+
import org.codeaurora.snapcam.R;
public class RefocusActivity extends Activity {
@@ -77,19 +82,27 @@ public class RefocusActivity extends Activity {
private int mCurrentImage = -1;
private int mRequestedImage = -1;
private LoadImageTask mLoadImageTask;
+ private boolean mMapRotated = false;
+ private int mOrientation = 0;
+ public static final int MAP_ROTATED = 1;
+ private String mFilesPath;
@Override
public void onCreate(Bundle state) {
super.onCreate(state);
+ mFilesPath = getFilesDir()+"";
+ if((getIntent().getFlags() & MAP_ROTATED) != 0) {
+ mMapRotated = true;
+ mFilesPath = getFilesDir()+"/Ubifocus";
+ }
new Thread(new Runnable() {
public void run() {
- mDepthMap = new DepthMap(getFilesDir() + "/DepthMapImage.y");
+ mDepthMap = new DepthMap(mFilesPath + "/DepthMapImage.y");
}
}).start();
mUri = getIntent().getData();
- setResult(RESULT_CANCELED, new Intent());
setContentView(R.layout.refocus_editor);
mIndicator = (Indicator) findViewById(R.id.refocus_indicator);
@@ -129,6 +142,7 @@ public class RefocusActivity extends Activity {
findViewById(R.id.refocus_cancel).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(final View v) {
+ setResult(RESULT_CANCELED, new Intent());
finish();
}
});
@@ -137,11 +151,12 @@ public class RefocusActivity extends Activity {
@Override
public void onClick(final View v) {
if (mRequestedImage != NAMES.length - 1) {
- new SaveImageTask().execute(getFilesDir() + "/" + NAMES[mRequestedImage]
+ new SaveImageTask().execute(mFilesPath + "/" + NAMES[mRequestedImage]
+ ".jpg");
} else {
finish();
}
+ setResult(RESULT_OK, new Intent());
}
});
@@ -163,7 +178,7 @@ public class RefocusActivity extends Activity {
if (depth != mCurrentImage) {
mCurrentImage = depth;
mLoadImageTask = new LoadImageTask();
- mLoadImageTask.execute(getFilesDir() + "/" + NAMES[depth] + ".jpg");
+ mLoadImageTask.execute(mFilesPath + "/" + NAMES[depth] + ".jpg");
}
}
}
@@ -202,11 +217,16 @@ public class RefocusActivity extends Activity {
final BitmapFactory.Options o = new BitmapFactory.Options();
o.inJustDecodeBounds = true;
BitmapFactory.decodeFile(path[0], o);
-
+ ExifInterface exif = new ExifInterface();
+ mOrientation = 0;
+ try {
+ exif.readExif(path[0]);
+ mOrientation = Exif.getOrientation(exif);
+ } catch (IOException e) {
+ }
int h = o.outHeight;
int w = o.outWidth;
int sample = 1;
-
if (h > mHeight || w > mWidth) {
while (h / sample / 2 > mHeight && w / sample / 2 > mWidth) {
sample *= 2;
@@ -215,7 +235,14 @@ public class RefocusActivity extends Activity {
o.inJustDecodeBounds = false;
o.inSampleSize = sample;
- return BitmapFactory.decodeFile(path[0], o);
+ Bitmap bitmap = BitmapFactory.decodeFile(path[0], o);
+ if (mOrientation != 0) {
+ Matrix matrix = new Matrix();
+ matrix.setRotate(mOrientation);
+ bitmap = Bitmap.createBitmap(bitmap, 0, 0,
+ bitmap.getWidth(), bitmap.getHeight(), matrix, false);
+ }
+ return bitmap;
}
protected void onPostExecute(Bitmap result) {
@@ -258,6 +285,21 @@ public class RefocusActivity extends Activity {
int newX = (int) (x * mWidth);
int newY = (int) (y * mHeight);
+ if(mMapRotated) {
+ if(mOrientation == 0) {
+ newX = (int) (x * mWidth);
+ newY = (int) (y * mHeight);
+ } else if(mOrientation == 90) {
+ newX = (int) ((y) * mWidth);
+ newY = (int) ((1 - x) * mHeight);
+ } else if (mOrientation == 180) {
+ newX = (int) ((1-x) * mWidth);
+ newY = (int) ((1-y) * mHeight);
+ } else if (mOrientation == 270) {
+ newX = (int) ((1-y) * mWidth);
+ newY = (int) ((x) * mHeight);
+ }
+ }
int[] hist = new int[256];
for (int i = 0; i < 256; i++) {
diff --git a/src/com/android/camera/SettingsManager.java b/src/com/android/camera/SettingsManager.java
index 37976f190..90f426090 100644
--- a/src/com/android/camera/SettingsManager.java
+++ b/src/com/android/camera/SettingsManager.java
@@ -44,7 +44,10 @@ import android.util.Range;
import android.util.Rational;
import android.util.Size;
+import com.android.camera.imageprocessor.filter.BeautificationFilter;
import com.android.camera.imageprocessor.filter.OptizoomFilter;
+import com.android.camera.imageprocessor.filter.TrackingFocusFrameListener;
+import com.android.camera.imageprocessor.filter.UbifocusFilter;
import com.android.camera.ui.ListMenu;
import com.android.camera.util.SettingTranslation;
@@ -71,6 +74,7 @@ public class SettingsManager implements ListMenu.SettingsListener {
// Custom-Scenemodes start from 100
public static final int SCENE_MODE_DUAL_INT = 100;
public static final int SCENE_MODE_OPTIZOOM_INT = 101;
+ public static final int SCENE_MODE_UBIFOCUS_INT = 102;
public static final String SCENE_MODE_DUAL_STRING = "100";
public static final String KEY_CAMERA_SAVEPATH = "pref_camera2_savepath_key";
public static final String KEY_RECORD_LOCATION = "pref_camera2_recordlocation_key";
@@ -79,6 +83,7 @@ public class SettingsManager implements ListMenu.SettingsListener {
public static final String KEY_FLASH_MODE = "pref_camera2_flashmode_key";
public static final String KEY_WHITE_BALANCE = "pref_camera2_whitebalance_key";
public static final String KEY_MAKEUP = "pref_camera2_makeup_key";
+ public static final String KEY_TRACKINGFOCUS = "pref_camera2_trackingfocus_key";
public static final String KEY_CAMERA2 = "pref_camera2_camera2_key";
public static final String KEY_MONO_ONLY = "pref_camera2_mono_only_key";
public static final String KEY_MONO_PREVIEW = "pref_camera2_mono_preview_key";
@@ -136,7 +141,7 @@ public class SettingsManager implements ListMenu.SettingsListener {
String cameraId = cameraIdList[i];
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
- Byte monoOnly = 0;
+ byte monoOnly = 0;
try {
monoOnly = characteristics.get(CaptureModule.MetaDataMonoOnlyKey);
}catch(Exception e) {
@@ -219,6 +224,17 @@ public class SettingsManager implements ListMenu.SettingsListener {
filterPreferences(cameraId);
initDepedencyTable();
initializeValueMap();
+ checkInitialDependency(cameraId);
+ }
+
+ private void checkInitialDependency(int cameraId) {
+ ListPreference videoQuality = mPreferenceGroup.findPreference(KEY_VIDEO_QUALITY);
+ if (videoQuality != null) {
+ String scene = getValue(SettingsManager.KEY_MAKEUP);
+ if(scene != null && scene.equalsIgnoreCase("on")) {
+ updateVideoQualityMenu(cameraId, 640, 480);
+ }
+ }
}
private void initDepedencyTable() {
@@ -447,6 +463,20 @@ public class SettingsManager implements ListMenu.SettingsListener {
else return CaptureModule.MONO_ID;
}
+ public void updateVideoQualityMenu(int cameraId, int maxWidth, int maxHeight) {
+ ListPreference videoQuality = mPreferenceGroup.findPreference(KEY_VIDEO_QUALITY);
+ if (videoQuality != null) {
+ List<String> sizes;
+ if(maxWidth < 0 && maxHeight < 0) {
+ sizes = getSupportedVideoSize(cameraId);
+ } else {
+ sizes = getSupportedVideoSize(cameraId, maxWidth, maxHeight);
+ }
+ CameraSettings.filterUnsupportedOptions(mPreferenceGroup,
+ videoQuality, sizes);
+ }
+ }
+
private void filterPreferences(int cameraId) {
// filter unsupported preferences
ListPreference whiteBalance = mPreferenceGroup.findPreference(KEY_WHITE_BALANCE);
@@ -467,6 +497,8 @@ public class SettingsManager implements ListMenu.SettingsListener {
ListPreference noiseReduction = mPreferenceGroup.findPreference(KEY_NOISE_REDUCTION);
ListPreference videoFlash = mPreferenceGroup.findPreference(KEY_VIDEO_FLASH_MODE);
ListPreference faceDetection = mPreferenceGroup.findPreference(KEY_FACE_DETECTION);
+ ListPreference makeup = mPreferenceGroup.findPreference(KEY_MAKEUP);
+ ListPreference trackingfocus = mPreferenceGroup.findPreference(KEY_TRACKINGFOCUS);
if (whiteBalance != null) {
CameraSettings.filterUnsupportedOptions(mPreferenceGroup,
@@ -502,7 +534,7 @@ public class SettingsManager implements ListMenu.SettingsListener {
iso, getSupportedIso(cameraId));
}
- if (iso != null) {
+ if (videoQuality != null) {
CameraSettings.filterUnsupportedOptions(mPreferenceGroup,
videoQuality, getSupportedVideoSize(cameraId));
}
@@ -542,6 +574,16 @@ public class SettingsManager implements ListMenu.SettingsListener {
if (!isFaceDetectionSupported(cameraId))
removePreference(mPreferenceGroup, KEY_FACE_DETECTION);
}
+
+ if (makeup != null) {
+ if (!BeautificationFilter.isSupportedStatic())
+ removePreference(mPreferenceGroup, KEY_MAKEUP);
+ }
+
+ if (trackingfocus != null) {
+ if (!TrackingFocusFrameListener.isSupportedStatic())
+ removePreference(mPreferenceGroup, KEY_TRACKINGFOCUS);
+ }
}
private void buildExposureCompensation(int cameraId) {
@@ -754,6 +796,19 @@ public class SettingsManager implements ListMenu.SettingsListener {
return res;
}
+ private List<String> getSupportedVideoSize(int cameraId, int maxWidth, int maxHeight) {
+ StreamConfigurationMap map = mCharacteristics.get(cameraId).get(
+ CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ Size[] sizes = map.getOutputSizes(MediaRecorder.class);
+ List<String> res = new ArrayList<>();
+ for (int i = 0; i < sizes.length; i++) {
+ if(sizes[i].getWidth() <= maxWidth && sizes[i].getHeight() <= maxHeight) {
+ res.add(sizes[i].toString());
+ }
+ }
+ return res;
+ }
+
private List<String> getSupportedRedeyeReduction(int cameraId) {
int[] flashModes = mCharacteristics.get(cameraId).get(CameraCharacteristics
.CONTROL_AE_AVAILABLE_MODES);
@@ -784,7 +839,8 @@ public class SettingsManager implements ListMenu.SettingsListener {
List<String> modes = new ArrayList<>();
modes.add("0"); // need special case handle for auto scene mode
if (mIsMonoCameraPresent) modes.add(SCENE_MODE_DUAL_STRING); // need special case handle for dual mode
- if (OptizoomFilter.isSupportedStatic()) modes.add(SCENE_MODE_OPTIZOOM_INT + ""); // need special case handle for dual mode
+ if (OptizoomFilter.isSupportedStatic()) modes.add(SCENE_MODE_OPTIZOOM_INT + "");
+ if (UbifocusFilter.isSupportedStatic() && cameraId == CaptureModule.BAYER_ID) modes.add(SCENE_MODE_UBIFOCUS_INT + "");
for (int mode : sceneModes) {
modes.add("" + mode);
}
diff --git a/src/com/android/camera/VideoModule.java b/src/com/android/camera/VideoModule.java
index 9e3c47cb9..612660450 100755
--- a/src/com/android/camera/VideoModule.java
+++ b/src/com/android/camera/VideoModule.java
@@ -1552,7 +1552,7 @@ public class VideoModule implements CameraModule,
// Profiles advertizes bitrate corresponding to published framerate.
// In case framerate is different, scale the bitrate
- int scaledBitrate = mProfile.videoBitRate * targetFrameRate / mProfile.videoFrameRate;
+ int scaledBitrate = (int) (mProfile.videoBitRate * ((double) targetFrameRate / mProfile.videoFrameRate));
Log.i(TAG, "Scaled Video bitrate : " + scaledBitrate);
mMediaRecorder.setVideoEncodingBitRate(scaledBitrate);
}
diff --git a/src/com/android/camera/WideAnglePanoramaUI.java b/src/com/android/camera/WideAnglePanoramaUI.java
index 171036446..14907e842 100644
--- a/src/com/android/camera/WideAnglePanoramaUI.java
+++ b/src/com/android/camera/WideAnglePanoramaUI.java
@@ -639,7 +639,7 @@ public class WideAnglePanoramaUI implements
+ progressLayout.getChildAt(0).getHeight() / 2;
int[] x = { r / 2, r / 10, r * 9 / 10, r / 2 };
- int[] y = { t / 2, (t + b1) / 2, (t + b1) / 2, b1 + pivotY };
+ int[] y = { t / 2 + pivotY, (t + b1) / 2, (t + b1) / 2, b1 + pivotY };
int idx1, idx2;
int g;
diff --git a/src/com/android/camera/imageprocessor/FrameProcessor.java b/src/com/android/camera/imageprocessor/FrameProcessor.java
index 6a2091158..74db08b31 100644
--- a/src/com/android/camera/imageprocessor/FrameProcessor.java
+++ b/src/com/android/camera/imageprocessor/FrameProcessor.java
@@ -30,14 +30,7 @@ IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.android.camera.imageprocessor;
import android.app.Activity;
-import android.content.Context;
-import android.graphics.Bitmap;
-import android.graphics.BitmapFactory;
-import android.graphics.Canvas;
import android.graphics.ImageFormat;
-import android.graphics.Matrix;
-import android.graphics.Rect;
-import android.graphics.YuvImage;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
@@ -45,21 +38,23 @@ import android.os.HandlerThread;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
-import android.renderscript.ScriptIntrinsicYuvToRGB;
import android.renderscript.Type;
-import android.util.Log;
import android.util.Size;
import android.view.Surface;
+import android.widget.Toast;
import com.android.camera.CaptureModule;
-import com.android.camera.PhotoModule;
+import com.android.camera.SettingsManager;
import com.android.camera.imageprocessor.filter.BeautificationFilter;
import com.android.camera.imageprocessor.filter.ImageFilter;
-import com.android.camera.util.CameraUtil;
+import com.android.camera.imageprocessor.filter.TrackingFocusFrameListener;
+import com.android.camera.ui.RotateTextToast;
-import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.Semaphore;
+import org.codeaurora.snapcam.R;
public class FrameProcessor {
@@ -67,13 +62,17 @@ public class FrameProcessor {
private Allocation mInputAllocation;
private Allocation mProcessAllocation;
private Allocation mOutputAllocation;
+ private Allocation mVideoOutputAllocation;
private HandlerThread mProcessingThread;
private Handler mProcessingHandler;
private HandlerThread mOutingThread;
private Handler mOutingHandler;
+ private HandlerThread mListeningThread;
+ private Handler mListeningHandler;
- public ProcessingTask mTask;
+ private ProcessingTask mTask;
+ private ListeningTask mListeningTask;
private RenderScript mRs;
private Activity mActivity;
ScriptC_YuvToRgb mRsYuvToRGB;
@@ -84,10 +83,13 @@ public class FrameProcessor {
private ArrayList<ImageFilter> mPreviewFilters;
private ArrayList<ImageFilter> mFinalFilters;
private Surface mSurfaceAsItIs;
+ private Surface mVideoSurfaceAsItIs;
private boolean mIsActive = false;
public static final int FILTER_NONE = 0;
public static final int FILTER_MAKEUP = 1;
+ public static final int LISTENER_TRACKING_FOCUS = 2;
private CaptureModule mModule;
+ private boolean mIsVideoOn = false;
public FrameProcessor(Activity activity, CaptureModule module) {
mActivity = activity;
@@ -97,6 +99,7 @@ public class FrameProcessor {
}
public void init(Size previewDim) {
+ mIsActive = true;
mSize = previewDim;
synchronized (mAllocationLock) {
mRs = RenderScript.create(mActivity);
@@ -122,6 +125,13 @@ public class FrameProcessor {
mOutingHandler = new Handler(mOutingThread.getLooper());
}
+ if(mListeningThread == null) {
+ mListeningThread = new HandlerThread("FrameListeningThread");
+ mListeningThread.start();
+ mListeningHandler = new Handler(mListeningThread.getLooper());
+ }
+
+ mListeningTask = new ListeningTask();
mTask = new ProcessingTask();
mInputImageReader.setOnImageAvailableListener(mTask, mProcessingHandler);
mIsAllocationEverUsed = false;
@@ -153,12 +163,12 @@ public class FrameProcessor {
}
private void cleanFilterSet() {
- if(mPreviewFilters != null) {
+ if (mPreviewFilters != null) {
for (ImageFilter filter : mPreviewFilters) {
filter.deinit();
}
}
- if(mFinalFilters != null) {
+ if (mFinalFilters != null) {
for (ImageFilter filter : mFinalFilters) {
filter.deinit();
}
@@ -168,25 +178,29 @@ public class FrameProcessor {
}
public void onOpen(ArrayList<Integer> filterIds) {
- mIsActive = true;
- synchronized (mAllocationLock) {
- cleanFilterSet();
- if (filterIds != null) {
- for (Integer i : filterIds) {
- addFilter(i.intValue());
- }
+ cleanFilterSet();
+ if (filterIds != null) {
+ for (Integer i : filterIds) {
+ addFilter(i.intValue());
}
}
}
private void addFilter(int filterId) {
- if(filterId == FILTER_MAKEUP) {
- ImageFilter filter = new BeautificationFilter(mModule);
- if(filter.isSupported()) {
- mPreviewFilters.add(filter);
+ ImageFilter filter = null;
+ if (filterId == FILTER_MAKEUP) {
+ filter = new BeautificationFilter(mModule);
+ } else if (filterId == LISTENER_TRACKING_FOCUS) {
+ filter = new TrackingFocusFrameListener(mModule);
+ }
+
+ if (filter != null && filter.isSupported()) {
+ mPreviewFilters.add(filter);
+ if (!filter.isFrameListener()) {
mFinalFilters.add(filter);
}
}
+
}
public void onClose() {
@@ -202,6 +216,9 @@ public class FrameProcessor {
if (mProcessAllocation != null) {
mProcessAllocation.destroy();
}
+ if (mVideoOutputAllocation != null) {
+ mVideoOutputAllocation.destroy();
+ }
}
if (mRs != null) {
mRs.destroy();
@@ -210,6 +227,7 @@ public class FrameProcessor {
mProcessAllocation = null;
mOutputAllocation = null;
mInputAllocation = null;
+ mVideoOutputAllocation = null;
}
if (mProcessingThread != null) {
mProcessingThread.quitSafely();
@@ -229,6 +247,15 @@ public class FrameProcessor {
} catch (InterruptedException e) {
}
}
+ if (mListeningThread != null) {
+ mListeningThread.quitSafely();
+ try {
+ mListeningThread.join();
+ mListeningThread = null;
+ mListeningHandler = null;
+ } catch (InterruptedException e) {
+ }
+ }
for(ImageFilter filter : mPreviewFilters) {
filter.deinit();
}
@@ -237,32 +264,75 @@ public class FrameProcessor {
}
}
- public Surface getInputSurface() {
- if(mPreviewFilters.size() == 0) {
- return mSurfaceAsItIs;
- }
+ private Surface getReaderSurface() {
synchronized (mAllocationLock) {
- if (mInputImageReader == null)
+ if (mInputImageReader == null) {
return null;
+ }
return mInputImageReader.getSurface();
}
}
+ public List<Surface> getInputSurfaces() {
+ List<Surface> surfaces = new ArrayList<Surface>();
+ if(mPreviewFilters.size() == 0 && mFinalFilters.size() == 0) {
+ surfaces.add(mSurfaceAsItIs);
+ if(mIsVideoOn) {
+ surfaces.add(mVideoSurfaceAsItIs);
+ }
+ } else if (mFinalFilters.size() == 0) {
+ surfaces.add(mSurfaceAsItIs);
+ if(mIsVideoOn) {
+ surfaces.add(mVideoSurfaceAsItIs);
+ }
+ surfaces.add(getReaderSurface());
+ } else {
+ surfaces.add(getReaderSurface());
+ }
+ return surfaces;
+ }
+
public boolean isFrameFilterEnabled() {
- if(mPreviewFilters.size() == 0) {
+ if(mFinalFilters.size() == 0) {
return false;
}
return true;
}
public void setOutputSurface(Surface surface) {
- if(mPreviewFilters.size() == 0) {
- mSurfaceAsItIs = surface;
- } else {
+ mSurfaceAsItIs = surface;
+ if(mFinalFilters.size() != 0) {
mOutputAllocation.setSurface(surface);
}
}
+ public void setVideoOutputSurface(Surface surface) {
+ if(surface == null) {
+ synchronized (mAllocationLock) {
+ if (mVideoOutputAllocation != null) {
+ mVideoOutputAllocation.destroy();
+ }
+ mVideoOutputAllocation = null;
+ }
+ mIsVideoOn = false;
+ return;
+ }
+ mVideoSurfaceAsItIs = surface;
+ mIsVideoOn = true;
+ if(mFinalFilters.size() != 0) {
+ synchronized (mAllocationLock) {
+ if (mVideoOutputAllocation == null) {
+ Type.Builder rgbTypeBuilder = new Type.Builder(mRs, Element.RGBA_8888(mRs));
+ rgbTypeBuilder.setX(mSize.getHeight());
+ rgbTypeBuilder.setY(mSize.getWidth());
+ mVideoOutputAllocation = Allocation.createTyped(mRs, rgbTypeBuilder.create(),
+ Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_OUTPUT);
+ }
+ mVideoOutputAllocation.setSurface(surface);
+ }
+ }
+ }
+
class ProcessingTask implements Runnable, ImageReader.OnImageAvailableListener {
byte[] yvuBytes = null;
int ySize;
@@ -276,12 +346,14 @@ public class FrameProcessor {
@Override
public void onImageAvailable(ImageReader reader) {
synchronized (mAllocationLock) {
- if(mOutputAllocation == null)
+ if(mOutputAllocation == null) {
return;
+ }
try {
Image image = reader.acquireLatestImage();
- if(image == null)
+ if(image == null) {
return;
+ }
if(!mIsActive) {
image.close();
return;
@@ -289,23 +361,35 @@ public class FrameProcessor {
mIsAllocationEverUsed = true;
ByteBuffer bY = image.getPlanes()[0].getBuffer();
ByteBuffer bVU = image.getPlanes()[2].getBuffer();
- if(yvuBytes == null) {
+ if(yvuBytes == null || width != mSize.getWidth() || height != mSize.getHeight()) {
stride = image.getPlanes()[0].getRowStride();
width = mSize.getWidth();
height = mSize.getHeight();
ySize = stride * mSize.getHeight();
yvuBytes = new byte[ySize*3/2];
}
+ boolean needToFeedSurface = false;
//Start processing yvu buf
for (ImageFilter filter : mPreviewFilters) {
- filter.init(mSize.getWidth(), mSize.getHeight(), stride, stride);
- filter.addImage(bY, bVU, 0, new Boolean(true));
+ if(filter.isFrameListener()) {
+ if (mListeningTask.setParam(filter, bY, bVU, mSize.getWidth(), mSize.getHeight(), stride)) {
+ mListeningHandler.post(mListeningTask);
+ }
+ } else {
+ filter.init(mSize.getWidth(), mSize.getHeight(), stride, stride);
+ filter.addImage(bY, bVU, 0, Boolean.TRUE);
+ needToFeedSurface = true;
+ }
+ bY.rewind();
+ bVU.rewind();
}
//End processing yvu buf
- bY.get(yvuBytes, 0, bY.remaining());
- bVU.get(yvuBytes, ySize, bVU.remaining());
+ if(needToFeedSurface) {
+ bY.get(yvuBytes, 0, bY.remaining());
+ bVU.get(yvuBytes, ySize, bVU.remaining());
+ mOutingHandler.post(this);
+ }
image.close();
- mOutingHandler.post(this);
} catch (IllegalStateException e) {
}
}
@@ -318,12 +402,68 @@ public class FrameProcessor {
return;
}
if(mInputAllocation == null) {
- createAllocation(stride, height, stride-width);
+ createAllocation(stride, height, stride - width);
}
mInputAllocation.copyFrom(yvuBytes);
mRsRotator.forEach_rotate90andMerge(mInputAllocation);
mRsYuvToRGB.forEach_nv21ToRgb(mOutputAllocation);
mOutputAllocation.ioSend();
+ if(mVideoOutputAllocation != null) {
+ mVideoOutputAllocation.copyFrom(mOutputAllocation);
+ mVideoOutputAllocation.ioSend();
+ }
+ }
+ }
+ }
+
+ class ListeningTask implements Runnable {
+
+ ImageFilter mFilter;
+ ByteBuffer mBY = null, mBVU = null;
+ int mWidth, mHeight, mStride;
+ int bYSize, bVUSize;
+ Semaphore mMutureLock = new Semaphore(1);
+
+ public boolean setParam(ImageFilter filter, ByteBuffer bY, ByteBuffer bVU, int width, int height, int stride) {
+ if(!mIsActive) {
+ return false;
+ }
+ if (!mMutureLock.tryAcquire()) {
+ return false;
+ }
+ mFilter = filter;
+ if (mBY == null || bYSize != bY.remaining()) {
+ bYSize = bY.remaining();
+ mBY = ByteBuffer.allocateDirect(bYSize);
+ }
+ if (mBVU == null || bVUSize != bVU.remaining()) {
+ bVUSize = bVU.remaining();
+ mBVU = ByteBuffer.allocateDirect(bVUSize);
+ }
+ mBY.rewind();
+ mBVU.rewind();
+ mBY.put(bY);
+ mBVU.put(bVU);
+ mWidth = width;
+ mHeight = height;
+ mStride = stride;
+ mMutureLock.release();
+ return true;
+ }
+
+ @Override
+ public void run() {
+ try {
+ if (!mIsActive) {
+ return;
+ }
+ mMutureLock.acquire();
+ mBY.rewind();
+ mBVU.rewind();
+ mFilter.init(mWidth, mHeight, mStride, mStride);
+ mFilter.addImage(mBY, mBVU, 0, Boolean.TRUE);
+ mMutureLock.release();
+ } catch (InterruptedException e) {
}
}
}
diff --git a/src/com/android/camera/imageprocessor/PostProcessor.java b/src/com/android/camera/imageprocessor/PostProcessor.java
index f39845346..8c0d2ff88 100644
--- a/src/com/android/camera/imageprocessor/PostProcessor.java
+++ b/src/com/android/camera/imageprocessor/PostProcessor.java
@@ -29,9 +29,12 @@ IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.android.camera.imageprocessor;
import android.content.ContentResolver;
+import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.media.Image;
import android.media.ImageReader;
@@ -50,9 +53,11 @@ import com.android.camera.SettingsManager;
import com.android.camera.exif.ExifInterface;
import com.android.camera.imageprocessor.filter.OptizoomFilter;
import com.android.camera.imageprocessor.filter.SharpshooterFilter;
+import com.android.camera.imageprocessor.filter.UbifocusFilter;
import com.android.camera.ui.RotateTextToast;
import java.io.ByteArrayOutputStream;
+import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
@@ -70,7 +75,8 @@ public class PostProcessor implements ImageReader.OnImageAvailableListener{
public static final int FILTER_NONE = 0;
public static final int FILTER_OPTIZOOM = 1;
public static final int FILTER_SHARPSHOOTER = 2;
- public static final int FILTER_MAX = 3;
+ public static final int FILTER_UBIFOCUS = 3;
+ public static final int FILTER_MAX = 4;
private int mCurrentNumImage = 0;
private ImageFilter mFilter;
@@ -86,6 +92,7 @@ public class PostProcessor implements ImageReader.OnImageAvailableListener{
private Image[] mImages;
private PhotoModule.NamedImages mNamedImages;
private WatchdogThread mWatchdog;
+ private int mOrientation = 0;
//This is for the debug feature.
private static boolean DEBUG_FILTER = false;
@@ -139,11 +146,17 @@ public class PostProcessor implements ImageReader.OnImageAvailableListener{
}
}
+ public boolean isManualMode() {
+ return mFilter.isManualMode();
+ }
+
+ public void manualCapture(CaptureRequest.Builder builder, CameraCaptureSession captureSession,
+ CameraCaptureSession.CaptureCallback callback, Handler handler) throws CameraAccessException {
+ mFilter.manualCapture(builder, captureSession, callback, handler);
+ }
+
public boolean isFilterOn() {
- if(mFilter != null) {
- return true;
- }
- if(mController.getFrameFilters().size() != 0) {
+ if (mFilter != null) {
return true;
}
return false;
@@ -269,6 +282,9 @@ public class PostProcessor implements ImageReader.OnImageAvailableListener{
case FILTER_SHARPSHOOTER:
mFilter = new SharpshooterFilter(mController);
break;
+ case FILTER_UBIFOCUS:
+ mFilter = new UbifocusFilter(mController, mActivity);
+ break;
}
}
@@ -333,6 +349,7 @@ public class PostProcessor implements ImageReader.OnImageAvailableListener{
if(mWatchdog != null) {
mWatchdog.startMonitor();
}
+ mOrientation = CameraUtil.getJpegRotation(mController.getMainCameraId(), mController.getDisplayOrientation());
}
if(mFilter != null && mCurrentNumImage >= mFilter.getNumRequiredImage()) {
return;
@@ -374,7 +391,7 @@ public class PostProcessor implements ImageReader.OnImageAvailableListener{
});
}
- private byte[] addExifTags(byte[] jpeg, int orientationInDegree) {
+ public static byte[] addExifTags(byte[] jpeg, int orientationInDegree) {
ExifInterface exif = new ExifInterface();
exif.addOrientationTag(orientationInDegree);
exif.addDateTimeStampTag(ExifInterface.TAG_DATE_TIME, System.currentTimeMillis(),
@@ -434,17 +451,16 @@ public class PostProcessor implements ImageReader.OnImageAvailableListener{
) {
Log.e(TAG, "Processed outRoi is not within picture range");
} else {
- int orientation = CameraUtil.getJpegRotation(mController.getMainCameraId(), mController.getDisplayOrientation());
if(mFilter != null && DEBUG_FILTER) {
- bytes = nv21ToJpeg(mDebugResultImage, orientation);
+ bytes = nv21ToJpeg(mDebugResultImage, mOrientation);
mActivity.getMediaSaveService().addImage(
bytes, title + "_beforeApplyingFilter", date, null, mDebugResultImage.outRoi.width(), mDebugResultImage.outRoi.height(),
- orientation, null, mediaSavedListener, contentResolver, "jpeg");
+ mOrientation, null, mediaSavedListener, contentResolver, "jpeg");
}
- bytes = nv21ToJpeg(resultImage, orientation);
+ bytes = nv21ToJpeg(resultImage, mOrientation);
mActivity.getMediaSaveService().addImage(
bytes, title, date, null, resultImage.outRoi.width(), resultImage.outRoi.height(),
- orientation, null, mediaSavedListener, contentResolver, "jpeg");
+ mOrientation, null, mediaSavedListener, contentResolver, "jpeg");
mController.updateThumbnailJpegData(bytes);
}
}
diff --git a/src/com/android/camera/imageprocessor/filter/BeautificationFilter.java b/src/com/android/camera/imageprocessor/filter/BeautificationFilter.java
index 6ec9376d0..77e6ead6e 100644
--- a/src/com/android/camera/imageprocessor/filter/BeautificationFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/BeautificationFilter.java
@@ -30,8 +30,10 @@ package com.android.camera.imageprocessor.filter;
import android.graphics.Rect;
import android.hardware.Camera;
+import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.Face;
+import android.os.Handler;
import android.util.Log;
import android.util.Size;
@@ -120,6 +122,26 @@ public class BeautificationFilter implements ImageFilter {
return mIsSupported;
}
+ public static boolean isSupportedStatic() {
+ return mIsSupported;
+ }
+
+ @Override
+ public boolean isFrameListener() {
+ return false;
+ }
+
+ @Override
+ public boolean isManualMode() {
+ return false;
+ }
+
+ @Override
+ public void manualCapture(CaptureRequest.Builder builder, CameraCaptureSession captureSession,
+ CameraCaptureSession.CaptureCallback callback, Handler handler) {
+
+ }
+
private native int nativeBeautificationProcess(ByteBuffer yB, ByteBuffer vuB,
int width, int height, int stride, int fleft, int ftop, int fright, int fbottom);
diff --git a/src/com/android/camera/imageprocessor/filter/ImageFilter.java b/src/com/android/camera/imageprocessor/filter/ImageFilter.java
index e62d9b30a..bb581c965 100644
--- a/src/com/android/camera/imageprocessor/filter/ImageFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/ImageFilter.java
@@ -29,7 +29,10 @@ IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.android.camera.imageprocessor.filter;
import android.graphics.Rect;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
+import android.os.Handler;
import java.nio.ByteBuffer;
import java.util.List;
@@ -41,6 +44,7 @@ public interface ImageFilter {
String getStringName();
+ /* This is used for auto mode burst picture */
int getNumRequiredImage();
void init(int width, int height, int strideY, int strideVU);
@@ -71,4 +75,14 @@ public interface ImageFilter {
this.stride = stride;
}
}
+
+ /* Whether it is post proc filter or frame proc filter */
+ boolean isFrameListener();
+
+ /* Whether it will use burst capture or manual capture */
+ boolean isManualMode();
+
+ /* if it's manual mode, this function has to be implemented */
+ void manualCapture(CaptureRequest.Builder builder, CameraCaptureSession captureSession,
+ CameraCaptureSession.CaptureCallback callback, Handler handler) throws CameraAccessException;
}
diff --git a/src/com/android/camera/imageprocessor/filter/OptizoomFilter.java b/src/com/android/camera/imageprocessor/filter/OptizoomFilter.java
index 9b5af29dc..486ea7a0c 100644
--- a/src/com/android/camera/imageprocessor/filter/OptizoomFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/OptizoomFilter.java
@@ -29,7 +29,9 @@ IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.android.camera.imageprocessor.filter;
import android.graphics.Rect;
+import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
+import android.os.Handler;
import android.util.Log;
import com.android.camera.CaptureModule;
@@ -127,6 +129,21 @@ public class OptizoomFilter implements ImageFilter{
return mIsSupported;
}
+ @Override
+ public boolean isFrameListener() {
+ return false;
+ }
+
+ @Override
+ public boolean isManualMode() {
+ return false;
+ }
+
+ @Override
+ public void manualCapture(CaptureRequest.Builder builder, CameraCaptureSession captureSession,
+ CameraCaptureSession.CaptureCallback callback, Handler handler) {
+ }
+
public static boolean isSupportedStatic() {
return mIsSupported;
}
diff --git a/src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java b/src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java
index 74469afc3..c51e13c59 100644
--- a/src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java
@@ -29,9 +29,11 @@ IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package com.android.camera.imageprocessor.filter;
import android.graphics.Rect;
+import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
+import android.os.Handler;
import android.util.Log;
import android.util.Range;
import android.util.Rational;
@@ -150,6 +152,22 @@ public class SharpshooterFilter implements ImageFilter{
return mIsSupported;
}
+ @Override
+ public boolean isFrameListener() {
+ return false;
+ }
+
+ @Override
+ public boolean isManualMode() {
+ return false;
+ }
+
+ @Override
+ public void manualCapture(CaptureRequest.Builder builder, CameraCaptureSession captureSession,
+ CameraCaptureSession.CaptureCallback callback, Handler handler) {
+
+ }
+
public static boolean isSupportedStatic() {
return mIsSupported;
}
diff --git a/src/com/android/camera/imageprocessor/filter/TrackingFocusFrameListener.java b/src/com/android/camera/imageprocessor/filter/TrackingFocusFrameListener.java
new file mode 100644
index 000000000..8d9a7fc1a
--- /dev/null
+++ b/src/com/android/camera/imageprocessor/filter/TrackingFocusFrameListener.java
@@ -0,0 +1,293 @@
+/*
+Copyright (c) 2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package com.android.camera.imageprocessor.filter;
+
+import android.graphics.Point;
+import android.graphics.Rect;
+import android.hardware.Camera;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.params.Face;
+import android.os.Handler;
+import android.util.Log;
+import android.util.Size;
+
+import com.android.camera.CaptureModule;
+import com.android.camera.ui.FilmstripBottomControls;
+import com.android.camera.ui.TrackingFocusRenderer;
+
+import java.nio.ByteBuffer;
+import java.util.HashSet;
+import java.util.List;
+
+public class TrackingFocusFrameListener implements ImageFilter {
+
+ int mWidth;
+ int mHeight;
+ int mStrideY;
+ int mStrideVU;
+ private CaptureModule mModule;
+ private static boolean DEBUG = false;
+ private static String TAG = "TrackingFocusFrameListener";
+ private static boolean mIsSupported = false;
+ private Rect imageRect;
+ public static final int PENDING_REGISTRATION = -1;
+ public static final int MAX_NUM_TRACKED_OBJECTS = 3;
+ private int mTrackedId = PENDING_REGISTRATION;
+ private boolean mIsInitialzed = false;
+ private TrackingFocusRenderer mTrackingFocusRender;
+ byte[] yvuBytes = null;
+ private int[] mInputCords = null;
+ private boolean mIsFirstTime = true;
+
+ public enum OperationMode {
+ DEFAULT,
+ PERFORMANCE,
+ CPU_OFFLOAD,
+ LOW_POWER
+ }
+
+ public enum Precision {
+ HIGH,
+ LOW
+ }
+
+ public TrackingFocusFrameListener(CaptureModule module) {
+ mModule = module;
+ }
+
+ @Override
+ public List<CaptureRequest> setRequiredImages(CaptureRequest.Builder builder) {
+ return null;
+ }
+
+ @Override
+ public String getStringName() {
+ return "TrackingFocusFrameListener";
+ }
+
+ @Override
+ public int getNumRequiredImage() {
+ return 1;
+ }
+
+ @Override
+ public void init(int width, int height, int strideY, int strideVU) {
+ mWidth = width;
+ mHeight = height;
+ mStrideY = strideY;
+ mStrideVU = strideVU;
+ if(!mIsInitialzed) {
+ if (nInit(OperationMode.PERFORMANCE.ordinal(), Precision.HIGH.ordinal(), mWidth, mHeight, mStrideY) < 0) {
+ Log.e(TAG, "Initialization failed.");
+ }
+ imageRect = new Rect(0, 0, width, height);
+ mTrackingFocusRender = mModule.getTrackingForcusRenderer();
+ yvuBytes = new byte[mStrideY*mHeight*3/2];
+ mIsInitialzed = true;
+ }
+ }
+
+ @Override
+ public void deinit() {
+ if (mIsInitialzed) {
+ nRelease();
+ mIsInitialzed = false;
+ }
+ }
+
+ @Override
+ public void addImage(ByteBuffer bY, ByteBuffer bVU, int imageNum, Object isPreview) {
+ bY.get(yvuBytes, 0, bY.remaining());
+ bVU.get(yvuBytes, mStrideY * mHeight, bVU.remaining());
+ int[] cords = mTrackingFocusRender.getInputCords(mWidth, mHeight);
+ if(cords != null) {
+ if(mTrackedId != PENDING_REGISTRATION) {
+ unregisterObject(mTrackedId);
+ mTrackedId = PENDING_REGISTRATION;
+ }
+ mIsFirstTime = true;
+ mInputCords = cords;
+ }
+ if(mInputCords != null) {
+ if (mTrackedId == PENDING_REGISTRATION) {
+ try {
+ mTrackedId = registerObject(yvuBytes, new Point(mInputCords[0], mInputCords[1]), mIsFirstTime);
+ mIsFirstTime = false;
+ }catch(IllegalArgumentException e) {
+ mTrackedId = PENDING_REGISTRATION;
+ Log.e(TAG, e.toString());
+ }
+ }
+ if(mTrackedId != PENDING_REGISTRATION) {
+ mTrackingFocusRender.putRegisteredCords(trackObjects(yvuBytes), mWidth, mHeight);
+ }
+ }
+ }
+
+ public static class Result {
+ public final int id;
+ public final int confidence;
+ public Rect pos;
+
+ private Result(int id, int confidence, int left, int top, int right, int bottom) {
+ this.id = id;
+ this.confidence = confidence;
+ this.pos = new Rect(left, top, right, bottom);
+ }
+
+ public static Result Copy(Result old) {
+ Result result = new Result(old.id, old.confidence, old.pos.left, old.pos.top, old.pos.right, old.pos.bottom);
+ return result;
+ }
+ }
+
+ public int getMinRoiDimension() {
+ if (!mIsInitialzed) {
+ throw new IllegalArgumentException("already released");
+ }
+
+ return nGetMinRoiDimension();
+ }
+
+ public int getMaxRoiDimension() {
+ if (!mIsInitialzed) {
+ throw new IllegalArgumentException("already released");
+ }
+
+ return nGetMaxRoiDimension();
+ }
+
+ public int registerObject(byte[] imageDataNV21, Rect rect)
+ {
+ if (imageDataNV21 == null || imageDataNV21.length < getMinFrameSize()) {
+ throw new IllegalArgumentException("imageDataNV21 null or too small to encode frame");
+ } else if (rect == null || rect.isEmpty() || !imageRect.contains(rect)) {
+ throw new IllegalArgumentException("rect must be non-empty and be entirely inside " +
+ "the frame");
+ } else if (!mIsInitialzed) {
+ throw new IllegalArgumentException("already released");
+ }
+ int id = nRegisterObjectByRect(imageDataNV21, rect.left, rect.top, rect.right, rect.bottom);
+ if(id == 0) {
+ id = PENDING_REGISTRATION;
+ }
+ mTrackedId = id;
+ return mTrackedId;
+ }
+
+ public int registerObject(byte[] imageDataNV21, Point point, boolean firstTime)
+ {
+ if (imageDataNV21 == null || imageDataNV21.length < getMinFrameSize()) {
+ throw new IllegalArgumentException("imageDataNV21 null or too small to encode frame "
+ + (imageDataNV21 == null ? -1 : imageDataNV21.length) + " " + getMinFrameSize());
+ } else if (point == null || !imageRect.contains(point.x, point.y)) {
+ throw new IllegalArgumentException("point is outside the image frame: "+imageRect.toString());
+ } else if (!mIsInitialzed) {
+ throw new IllegalArgumentException("already released");
+ }
+ int id = nRegisterObjectByPoint(imageDataNV21, point.x, point.y, firstTime);
+ if(id == 0) {
+ id = PENDING_REGISTRATION;
+ }
+ mTrackedId = id;
+ return mTrackedId;
+ }
+
+ public void unregisterObject(int id)
+ {
+ if (id == PENDING_REGISTRATION) {
+ Log.e(TAG, "There's a pending object");
+ } else if (!mIsInitialzed) {
+ Log.e(TAG, "already released");
+ }
+ nUnregisterObject(id);
+ }
+
+ public Result trackObjects(byte[] imageDataNV21)
+ {
+ if (imageDataNV21 == null || imageDataNV21.length < getMinFrameSize()) {
+ Log.e(TAG, "imageDataNV21 null or too small to encode frame "
+ + (imageDataNV21 == null ? -1 : imageDataNV21.length) + " " + getMinFrameSize());
+ } else if (!mIsInitialzed) {
+ Log.e(TAG, "It's released");
+ }
+
+ int[] nResults = nTrackObjects(imageDataNV21);
+ return new Result(nResults[0], nResults[1], nResults[2], nResults[3], nResults[4], nResults[5]);
+ }
+
+ private int getMinFrameSize() {
+ return ((mStrideY * imageRect.bottom * 3) / 2);
+ }
+
+ @Override
+ public ResultImage processImage() {
+ return null;
+ }
+
+ @Override
+ public boolean isSupported() {
+ return mIsSupported;
+ }
+
+ public static boolean isSupportedStatic() {
+ return mIsSupported;
+ }
+
+ @Override
+ public boolean isFrameListener() {
+ return true;
+ }
+
+ @Override
+ public boolean isManualMode() {
+ return false;
+ }
+
+ @Override
+ public void manualCapture(CaptureRequest.Builder builder, CameraCaptureSession captureSession,
+ CameraCaptureSession.CaptureCallback callback, Handler handler) {
+
+ }
+
+ private native int nInit(int operationMode, int precision, int width, int height, int stride);
+ private native void nRelease();
+ private native int nGetMinRoiDimension();
+ private native int nGetMaxRoiDimension();
+ private native int nRegisterObjectByRect(byte[] imageDataNV21, int left, int top, int right, int bottom);
+ private native int nRegisterObjectByPoint(byte[] imageDataNV21, int x, int y, boolean firstTime);
+ private native void nUnregisterObject(int id);
+ private native int[] nTrackObjects(byte[] imageDataNV21);
+
+ static {
+ mIsSupported = false;
+ }
+}
diff --git a/src/com/android/camera/imageprocessor/filter/UbifocusFilter.java b/src/com/android/camera/imageprocessor/filter/UbifocusFilter.java
new file mode 100644
index 000000000..b410e01b4
--- /dev/null
+++ b/src/com/android/camera/imageprocessor/filter/UbifocusFilter.java
@@ -0,0 +1,269 @@
+/*
+Copyright (c) 2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+package com.android.camera.imageprocessor.filter;
+
+import android.app.ProgressDialog;
+import android.content.Context;
+import android.graphics.Camera;
+import android.graphics.ImageFormat;
+import android.graphics.Rect;
+import android.graphics.YuvImage;
+import android.hardware.camera2.CameraAccessException;
+import android.hardware.camera2.CameraCaptureSession;
+import android.hardware.camera2.CameraCharacteristics;
+import android.hardware.camera2.CaptureRequest;
+import android.hardware.camera2.CaptureResult;
+import android.os.Handler;
+import android.util.Log;
+import android.util.Range;
+
+import com.android.camera.CameraActivity;
+import com.android.camera.CaptureModule;
+import com.android.camera.imageprocessor.PostProcessor;
+import com.android.camera.util.CameraUtil;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Captures a five-frame focus bracket, hands the frames to the native
+ * "ubifocus" library, and saves the inputs, a depth map and an all-in-focus
+ * JPEG under the app's private Ubifocus directory for later refocusing.
+ */
+public class UbifocusFilter implements ImageFilter {
+    public static final int NUM_REQUIRED_IMAGE = 5;
+    private int mWidth;
+    private int mHeight;
+    private int mStrideY;
+    private int mStrideVU;
+    private static String TAG = "UbifocusFilter";
+    private static final boolean DEBUG = true;
+    // Max time (ms) to wait for the lens to settle on a requested distance.
+    private static final int FOCUS_ADJUST_TIME_OUT = 200;
+    // Header bytes the native library prepends to the raw depth-map buffer.
+    private static final int META_BYTES_SIZE = 25;
+    private static boolean mIsSupported = true;
+    private ByteBuffer mOutBuf;
+    private CaptureModule mModule;
+    private CameraActivity mActivity;
+    private int mOrientation = 0;
+    private float mMinFocusDistance = -1f;
+    // Side files: five bracketed inputs, the depth map, the all-focus result.
+    final String[] NAMES = {"00.jpg", "01.jpg", "02.jpg", "03.jpg",
+            "04.jpg", "DepthMapImage.y", "AllFocusImage.jpg"};
+
+    // Debug-only logging helper; DEBUG is a compile-time constant.
+    private static void Log(String msg) {
+        if (DEBUG) {
+            Log.d(TAG, msg);
+        }
+    }
+
+    public UbifocusFilter(CaptureModule module, CameraActivity activity) {
+        mModule = module;
+        mActivity = activity;
+    }
+
+    @Override
+    public List<CaptureRequest> setRequiredImages(CaptureRequest.Builder builder) {
+        // Frames are issued one by one from manualCapture(), not pre-built.
+        return null;
+    }
+
+    @Override
+    public String getStringName() {
+        return "UbifocusFilter";
+    }
+
+    @Override
+    public int getNumRequiredImage() {
+        return NUM_REQUIRED_IMAGE;
+    }
+
+    @Override
+    public void init(int width, int height, int strideY, int strideVU) {
+        Log("init");
+        // Round every dimension down to an even value for NV21 alignment.
+        mWidth = width / 2 * 2;
+        mHeight = height / 2 * 2;
+        mStrideY = strideY / 2 * 2;
+        mStrideVU = strideVU / 2 * 2;
+        // Output buffer sized for one full NV21 frame (Y plane + VU plane).
+        mOutBuf = ByteBuffer.allocate(mStrideY * mHeight * 3 / 2);
+        Log("width: " + mWidth + " height: " + mHeight + " strideY: " + mStrideY + " strideVU: " + mStrideVU);
+        nativeInit(mWidth, mHeight, mStrideY, mStrideVU, NUM_REQUIRED_IMAGE);
+    }
+
+    @Override
+    public void deinit() {
+        Log("deinit");
+        mOutBuf = null;
+        nativeDeinit();
+    }
+
+    @Override
+    public void addImage(ByteBuffer bY, ByteBuffer bVU, int imageNum, Object param) {
+        Log("addImage");
+        if (imageNum == 0) {
+            // First frame of the burst: clear the refocus flag and latch one
+            // JPEG rotation so every saved frame shares the same orientation.
+            mModule.setRefocusLastTaken(false);
+            mOrientation = CameraUtil.getJpegRotation(mModule.getMainCameraId(), mModule.getDisplayOrientation());
+        }
+        saveToPrivateFile(imageNum, nv21ToJpeg(bY, bVU, new Rect(0, 0, mWidth, mHeight), mOrientation));
+        int yActualSize = bY.remaining();
+        int vuActualSize = bVU.remaining();
+        if (nativeAddImage(bY, bVU, yActualSize, vuActualSize, imageNum) < 0) {
+            Log.e(TAG, "Fail to add image");
+        }
+    }
+
+    @Override
+    public ResultImage processImage() {
+        Log("processImage ");
+        int[] roi = new int[4];          // {left, top, width, height}
+        int[] depthMapSize = new int[2]; // {width, height}
+        int status = nativeProcessImage(mOutBuf.array(), roi, depthMapSize);
+        if (status < 0) { //In failure case, library will return the first image as it is.
+            Log.w(TAG, "Fail to process the " + getStringName());
+        } else {
+            byte[] depthMapBuf = new byte[depthMapSize[0] * depthMapSize[1] + META_BYTES_SIZE];
+            nativeGetDepthMap(depthMapBuf, depthMapSize[0], depthMapSize[1]);
+            saveToPrivateFile(NAMES.length - 2, depthMapBuf);
+            saveToPrivateFile(NAMES.length - 1, nv21ToJpeg(mOutBuf, null, new Rect(roi[0], roi[1], roi[0] + roi[2], roi[1] + roi[3]), mOrientation));
+            mModule.setRefocusLastTaken(true);
+        }
+
+        ResultImage result = new ResultImage(mOutBuf, new Rect(roi[0], roi[1], roi[0] + roi[2], roi[1] + roi[3]), mWidth, mHeight, mStrideY);
+        Log("processImage done");
+        return result;
+    }
+
+    @Override
+    public boolean isSupported() {
+        return mIsSupported;
+    }
+
+    @Override
+    public boolean isFrameListener() {
+        // This filter runs on captured stills, not on live preview frames.
+        return false;
+    }
+
+    @Override
+    public boolean isManualMode() {
+        // The focus bracket is driven manually via manualCapture().
+        return true;
+    }
+
+    /**
+     * Sweeps the lens across NUM_REQUIRED_IMAGE evenly spaced focus distances
+     * and fires one capture per step, waiting (bounded by
+     * FOCUS_ADJUST_TIME_OUT) for the lens to settle before each shot.
+     */
+    @Override
+    public void manualCapture(CaptureRequest.Builder builder, CameraCaptureSession captureSession,
+                              CameraCaptureSession.CaptureCallback callback, Handler handler) throws CameraAccessException {
+        if (mMinFocusDistance == -1f) {
+            // NOTE(review): assumes LENS_INFO_MINIMUM_FOCUS_DISTANCE is
+            // advertised; on a fixed-focus camera get() returns null and this
+            // unboxing would throw — confirm against supported devices.
+            mMinFocusDistance = mModule.getMainCameraCharacteristics().get(CameraCharacteristics.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
+        }
+        float step = mMinFocusDistance / NUM_REQUIRED_IMAGE;
+        for (int i = 0; i < NUM_REQUIRED_IMAGE; i++) {
+            float value = (i * step);
+            mModule.setAFModeToPreview(mModule.getMainCameraId(), CaptureRequest.CONTROL_AF_MODE_OFF);
+            mModule.setFocusDistanceToPreview(mModule.getMainCameraId(), value);
+            try {
+                // Poll the preview result until the reported lens distance is
+                // near the request, or give up after FOCUS_ADJUST_TIME_OUT ms.
+                int count = FOCUS_ADJUST_TIME_OUT;
+                do {
+                    Thread.sleep(5);
+                    count -= 5;
+                    if (count <= 0) {
+                        break;
+                    }
+                } while (Math.abs(mModule.getPreviewCaptureResult().get(CaptureResult.LENS_FOCUS_DISTANCE)
+                        - value) >= 0.5f);
+            } catch (InterruptedException e) {
+                // Restore the interrupt status instead of swallowing it, then
+                // fall through and issue the capture as before.
+                Thread.currentThread().interrupt();
+            }
+            builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_OFF);
+            builder.set(CaptureRequest.LENS_FOCUS_DISTANCE, value);
+            captureSession.capture(builder.build(), callback, handler);
+            Log.d(TAG, "Request: " + value);
+        }
+    }
+
+    public static boolean isSupportedStatic() {
+        return mIsSupported;
+    }
+
+    // Packs the Y and VU planes into one NV21 buffer and JPEG-encodes the
+    // given region at quality 50, then stamps EXIF orientation.
+    private byte[] nv21ToJpeg(ByteBuffer bY, ByteBuffer bVU, Rect roi, int orientation) {
+        ByteBuffer buf = ByteBuffer.allocate(mStrideY * mHeight * 3 / 2);
+        buf.put(bY);
+        bY.rewind();
+        if (bVU != null) {
+            buf.put(bVU);
+            bVU.rewind();
+        }
+        BitmapOutputStream bos = new BitmapOutputStream(1024);
+        YuvImage im = new YuvImage(buf.array(), ImageFormat.NV21,
+                mWidth, mHeight, new int[]{mStrideY, mStrideVU});
+        im.compressToJpeg(roi, 50, bos);
+        byte[] bytes = bos.getArray();
+        bytes = PostProcessor.addExifTags(bytes, orientation);
+        return bytes;
+    }
+
+    // Exposes the stream's backing array without a copy. NOTE(review): the
+    // array can be longer than the bytes actually written; trailing slack
+    // after the JPEG EOI marker is tolerated by decoders but inflates files.
+    private class BitmapOutputStream extends ByteArrayOutputStream {
+        public BitmapOutputStream(int size) {
+            super(size);
+        }
+
+        public byte[] getArray() {
+            return buf;
+        }
+    }
+
+    // Writes one side file asynchronously under <files>/Ubifocus. Failures are
+    // logged (not thrown): a missing side file only degrades the refocus
+    // viewer, so the capture itself must not crash.
+    private void saveToPrivateFile(final int index, final byte[] bytes) {
+        new Thread() {
+            public void run() {
+                String filesPath = mActivity.getFilesDir() + "/Ubifocus";
+                File dir = new File(filesPath);
+                if (!dir.exists()) {
+                    dir.mkdir();
+                }
+                File file = new File(filesPath + "/" + NAMES[index]);
+                // try-with-resources closes the stream even when write()
+                // throws; the original leaked the FileOutputStream on failure
+                // and swallowed the exception silently.
+                try (FileOutputStream out = new FileOutputStream(file)) {
+                    out.write(bytes, 0, bytes.length);
+                } catch (Exception e) {
+                    Log.e(TAG, "Fail to save " + NAMES[index]);
+                }
+            }
+        }.start();
+    }
+
+    private native int nativeInit(int width, int height, int yStride, int vuStride, int numImages);
+    private native int nativeDeinit();
+    private native int nativeAddImage(ByteBuffer yB, ByteBuffer vuB, int ySize, int vuSize, int imageNum);
+    private native int nativeGetDepthMap(byte[] depthMapBuf, int depthMapWidth, int depthMapHeight);
+    private native int nativeProcessImage(byte[] buffer, int[] roi, int[] depthMapSize);
+
+    static {
+        try {
+            System.loadLibrary("jni_ubifocus");
+            mIsSupported = true;
+        } catch (UnsatisfiedLinkError e) {
+            mIsSupported = false;
+        }
+    }
+}
diff --git a/src/com/android/camera/ui/RenderOverlay.java b/src/com/android/camera/ui/RenderOverlay.java
index 318a754dd..8e2f4457f 100644
--- a/src/com/android/camera/ui/RenderOverlay.java
+++ b/src/com/android/camera/ui/RenderOverlay.java
@@ -92,6 +92,14 @@ public class RenderOverlay extends FrameLayout {
if (mGestures != null) {
if (!mGestures.isEnabled()) return false;
mGestures.dispatchTouch(m);
+ return true;
+ }
+ if (mTouchClients != null) {
+ boolean res = false;
+ for (Renderer client : mTouchClients) {
+ res |= client.onTouchEvent(m);
+ }
+ return res;
}
return true;
}
diff --git a/src/com/android/camera/ui/TrackingFocusRenderer.java b/src/com/android/camera/ui/TrackingFocusRenderer.java
new file mode 100644
index 000000000..3738137c2
--- /dev/null
+++ b/src/com/android/camera/ui/TrackingFocusRenderer.java
@@ -0,0 +1,285 @@
+/*
+Copyright (c) 2016, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package com.android.camera.ui;
+
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.Rect;
+import android.view.MotionEvent;
+
+import com.android.camera.CameraActivity;
+import com.android.camera.CaptureModule;
+import com.android.camera.CaptureUI;
+import com.android.camera.imageprocessor.filter.TrackingFocusFrameListener;
+
+/**
+ * Overlay that lets the user tap an object to track: converts taps into
+ * library coordinates, draws the tracked rectangle reported back by
+ * TrackingFocusFrameListener, and periodically re-focuses on the tracked
+ * region from a background thread.
+ */
+public class TrackingFocusRenderer extends OverlayRenderer implements FocusIndicator {
+    private FocusRequestThread mFocusRequestThread = null;
+    private TrackingFocusFrameListener.Result mResult;
+    private CameraActivity mActivity;
+    private CaptureModule mModule;
+    private Paint mTargetPaint;
+    private int mInX = -1;
+    private int mInY = -1;
+    private final static int CIRCLE_THUMB_SIZE = 100;
+    // Guards mStatus, mResult and the last tap coordinates, which are shared
+    // between the UI thread, the frame listener and FocusRequestThread.
+    private Object mLock = new Object();
+    private Rect mSurfaceDim;
+    private CaptureUI mUI;
+
+    public final static int STATUS_INIT = 0;
+    public final static int STATUS_INPUT = 1;
+    public final static int STATUS_TRACKING = 2;
+    public final static int STATUS_TRACKED = 3;
+    private int mStatus = STATUS_INIT;
+
+    private final static String TAG = "TrackingFocusRenderer";
+    private final static boolean DEBUG = false; //Enabling DEBUG LOG reduces the performance drastically.
+
+    private void printErrorLog(String msg) {
+        if (DEBUG) {
+            android.util.Log.e(TAG, msg);
+        }
+    }
+
+    /**
+     * Shows or hides the overlay. Hiding resets the tracking state and stops
+     * the focus-request thread; showing starts a fresh one.
+     */
+    @Override
+    public void setVisible(boolean visible) {
+        super.setVisible(visible);
+        if (!visible) {
+            synchronized (mLock) {
+                mStatus = STATUS_INIT;
+                mResult = null;
+                mInX = 0;
+                mInY = 0;
+            }
+            if (mFocusRequestThread != null) {
+                mFocusRequestThread.kill();
+                mFocusRequestThread = null;
+            }
+        } else {
+            mFocusRequestThread = new FocusRequestThread();
+            mFocusRequestThread.start();
+        }
+    }
+
+    // Records the on-screen rectangle of the preview surface, used to map
+    // between screen taps and frame coordinates.
+    public void setSurfaceDim(int left, int top, int right, int bottom) {
+        mSurfaceDim = new Rect(left, top, right, bottom);
+    }
+
+    public TrackingFocusRenderer(CameraActivity activity, CaptureModule module, CaptureUI ui) {
+        mActivity = activity;
+        mModule = module;
+        mUI = ui;
+        mTargetPaint = new Paint();
+        mTargetPaint.setStrokeWidth(4f);
+        mTargetPaint.setStyle(Paint.Style.STROKE);
+    }
+
+    @Override
+    public boolean handlesTouch() {
+        return true;
+    }
+
+    // Latches the tap position on ACTION_UP and flags it as pending input for
+    // getInputCords(); taps outside the preview surface are ignored.
+    @Override
+    public boolean onTouchEvent(MotionEvent event) {
+        switch (event.getActionMasked()) {
+            case MotionEvent.ACTION_DOWN:
+                break;
+            case MotionEvent.ACTION_UP:
+                synchronized (mLock) {
+                    mInX = (int) event.getX();
+                    mInY = (int) event.getY();
+                    // Guard against a tap arriving before setSurfaceDim() has
+                    // been called (mSurfaceDim still null), which previously
+                    // threw NullPointerException here.
+                    if (mSurfaceDim == null || !mSurfaceDim.contains(mInX, mInY)) {
+                        break;
+                    }
+                    mStatus = STATUS_INPUT;
+                }
+                update();
+                break;
+        }
+        return true;
+    }
+
+    /**
+     * Converts the pending tap from screen coordinates into the frame space
+     * expected by the tracking library ({@code width}x{@code height}), or
+     * returns null when no new tap is pending. Consumes the tap by moving the
+     * state to STATUS_TRACKING.
+     */
+    public int[] getInputCords(int width, int height) {
+        synchronized (mLock) {
+            if (mStatus != STATUS_INPUT) {
+                return null;
+            }
+            mStatus = STATUS_TRACKING;
+            int x = (mUI.getDisplaySize().y - 1 - mInY);
+            int y = mInX;
+            int bottomMargin = mUI.getDisplaySize().y - mSurfaceDim.bottom;
+            x = (int) ((x - bottomMargin) * ((float) width / mSurfaceDim.height()));
+            y = (int) ((y - mSurfaceDim.left) * ((float) height / mSurfaceDim.width()));
+
+            /* It's supposed to give x,y like above but library x,y is reversed*/
+            if (mModule.isBackCamera()) {
+                x = width - 1 - x;
+            }
+            y = height - 1 - y;
+
+            return new int[]{x, y};
+        }
+    }
+
+    // Called by the frame listener with the library's latest tracked region;
+    // translates it to surface coordinates and schedules a redraw on the UI
+    // thread. An empty region means the target was lost (back to TRACKING).
+    public void putRegisteredCords(TrackingFocusFrameListener.Result result, int width, int height) {
+        synchronized (mLock) {
+            if (result != null && result.pos != null &&
+                    !(result.pos.width() == 0 && result.pos.height() == 0)) {
+                result.pos = translateToSurface(result.pos, width, height);
+                mResult = result;
+                mStatus = STATUS_TRACKED;
+            } else {
+                mStatus = STATUS_TRACKING;
+            }
+        }
+        mActivity.runOnUiThread(new Runnable() {
+            public void run() {
+                update();
+            }
+        });
+    }
+
+    // Maps a rect from library frame coordinates back onto the preview
+    // surface, compensating for the library's reversed axes.
+    private Rect translateToSurface(Rect src, int width, int height) {
+        /* It's supposed to be this but direction is reversed in library.
+        int x = src.centerY();
+        int y = width-1-src.centerX();
+        */
+        int x = height - 1 - src.centerY();
+        int y = src.centerX();
+        if (!mModule.isBackCamera()) {
+            y = width - 1 - src.centerX();
+        }
+        int w = (int) (src.height() * ((float) mSurfaceDim.width() / height));
+        int h = (int) (src.width() * ((float) mSurfaceDim.height() / width));
+        x = mSurfaceDim.left + (int) (x * ((float) mSurfaceDim.width() / height));
+        y = mSurfaceDim.top + (int) (y * ((float) mSurfaceDim.height() / width));
+        Rect rect = new Rect();
+        rect.left = x - w / 2;
+        rect.top = y - h / 2;
+        rect.right = rect.left + w;
+        rect.bottom = rect.top + h;
+        return rect;
+    }
+
+    // Last rect handed to onDraw; kept outside the lock so drawing can reuse
+    // the previous rect while tracking is momentarily lost.
+    private Rect mRect;
+
+    @Override
+    public void onDraw(Canvas canvas) {
+        synchronized (mLock) {
+            if (mResult == null) {
+                return;
+            }
+            if (mStatus == STATUS_TRACKED) {
+                mRect = mResult.pos;
+            }
+        }
+
+        // Green rect: target locked; red rect: reacquiring a lost target;
+        // red circle: user tap awaiting registration.
+        if (mStatus == STATUS_TRACKED) {
+            if (mRect != null) {
+                mTargetPaint.setColor(Color.GREEN);
+                canvas.drawRect(mRect, mTargetPaint);
+            }
+        } else if (mStatus == STATUS_TRACKING) {
+            if (mRect != null) {
+                mTargetPaint.setColor(Color.RED);
+                canvas.drawRect(mRect, mTargetPaint);
+            }
+        } else if (mStatus == STATUS_INPUT) {
+            mTargetPaint.setColor(Color.RED);
+            canvas.drawCircle(mInX, mInY, CIRCLE_THUMB_SIZE, mTargetPaint);
+        }
+    }
+
+    @Override
+    public void showStart() {
+    }
+
+    @Override
+    public void showSuccess(boolean timeout) {
+    }
+
+    @Override
+    public void showFail(boolean timeout) {
+
+    }
+
+    @Override
+    public void clear() {
+
+    }
+
+    /**
+     * Periodically re-focuses on the tracked region: once a second it issues a
+     * tap-to-focus at the region's center, but only when the region has moved
+     * or resized beyond the MIN_DIFF thresholds since the last request.
+     */
+    private class FocusRequestThread extends Thread {
+        // volatile: kill() writes this from the UI thread while run() polls it
+        // here; without volatile the stop request might never become visible
+        // and the thread could spin forever.
+        private volatile boolean isRunning = true;
+        private final static int FOCUS_DELAY = 1000;
+        private final static int MIN_DIFF_CORDS = 100;
+        private final static int MIN_DIFF_SIZE = 100;
+        private int mOldX = -MIN_DIFF_CORDS;
+        private int mOldY = -MIN_DIFF_CORDS;
+        private int mOldWidth = -MIN_DIFF_SIZE;
+        private int mOldHeight = -MIN_DIFF_SIZE;
+        private int mNewX;
+        private int mNewY;
+        private int mNewWidth;
+        private int mNewHeight;
+
+        public void kill() {
+            isRunning = false;
+        }
+
+        public void run() {
+            while (isRunning) {
+                try {
+                    Thread.sleep(FOCUS_DELAY);
+                } catch (InterruptedException e) {
+                    //Ignore
+                }
+
+                synchronized (mLock) {
+                    if (mResult == null || mResult.pos == null
+                            || (mResult.pos.centerX() == 0 && mResult.pos.centerY() == 0)) {
+                        continue;
+                    }
+                    mNewX = mResult.pos.centerX();
+                    mNewY = mResult.pos.centerY();
+                    mNewWidth = mResult.pos.width();
+                    mNewHeight = mResult.pos.height();
+                }
+                if (Math.abs(mOldX - mNewX) >= MIN_DIFF_CORDS || Math.abs(mOldY - mNewY) >= MIN_DIFF_CORDS ||
+                        Math.abs(mOldWidth - mNewWidth) >= MIN_DIFF_SIZE || Math.abs(mOldHeight - mNewHeight) >= MIN_DIFF_SIZE) {
+                    mModule.onSingleTapUp(null, mNewX, mNewY);
+                    mOldX = mNewX;
+                    mOldY = mNewY;
+                    mOldWidth = mNewWidth;
+                    mOldHeight = mNewHeight;
+                }
+            }
+        }
+    }
+}