author | Camera Software Integration <camswint@localhost> | 2016-07-19 22:49:02 -0700 |
---|---|---|
committer | Gerrit - the friendly Code Review server <code-review@localhost> | 2016-07-19 22:49:02 -0700 |
commit | 3e577469f0140202450e05995e335511a02424dd (patch) | |
tree | 6e2dd9a299e37e3a737cfd389fd56cca285ad0c9 | |
parent | d8f8770adb894d7ccdedde05512be91885fec07f (diff) | |
parent | 12d17d80e1227e9164e7e0aa7524cd39d23bf124 (diff) | |
Merge "SnapdragonCamera: FrameListener and Video frame processor" into camera.lnx.1.0-dev.1.0
-rw-r--r-- | res/values/camera2arrays.xml | 10
-rw-r--r-- | res/values/qcomstrings.xml | 1
-rw-r--r-- | res/values/strings.xml | 1
-rw-r--r-- | res/xml/capture_preferences.xml | 6
-rw-r--r-- | src/com/android/camera/CaptureModule.java | 184
-rw-r--r-- | src/com/android/camera/CaptureUI.java | 48
-rw-r--r-- | src/com/android/camera/SettingsManager.java | 41
-rw-r--r-- | src/com/android/camera/imageprocessor/FrameProcessor.java | 228
-rw-r--r-- | src/com/android/camera/imageprocessor/filter/BeautificationFilter.java | 5
-rw-r--r-- | src/com/android/camera/imageprocessor/filter/ImageFilter.java | 2
-rw-r--r-- | src/com/android/camera/imageprocessor/filter/OptizoomFilter.java | 5
-rw-r--r-- | src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java | 5
-rw-r--r-- | src/com/android/camera/imageprocessor/filter/TrackingFocusFrameListener.java | 276
-rw-r--r-- | src/com/android/camera/ui/RenderOverlay.java | 8
-rw-r--r-- | src/com/android/camera/ui/TrackingFocusRenderer.java | 285
15 files changed, 967 insertions, 138 deletions
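For orientation before the full diff: the reworked FrameProcessor.onImageAvailable() now splits its filters into two kinds. Listener-only filters (isFrameListener() returns true, e.g. the new TrackingFocusFrameListener) receive a copy of each frame on a separate listening thread, while in-place filters (e.g. BeautificationFilter) process the buffer directly and have their result fed back to the preview/video output surfaces. The snippet below is a minimal plain-Java sketch of that dispatch pattern, not the actual class: `FrameDispatcher`, `Filter`, and `process()` are hypothetical names, and an ExecutorService plus a buffer copy stand in for the HandlerThread/Semaphore plumbing used in the patch.

```java
// Minimal sketch of the two-way frame dispatch introduced in this change.
// Hypothetical names; only isFrameListener() mirrors the patched ImageFilter API.
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

interface Filter {
    boolean isFrameListener();                      // true: analysis only, never modifies the frame
    void process(byte[] frame, int width, int height);
}

final class FrameDispatcher {
    private final List<Filter> filters = new ArrayList<>();
    // Listener filters run off the camera callback thread, like the patch's FrameListeningThread.
    private final ExecutorService listenerWorker = Executors.newSingleThreadExecutor();

    void addFilter(Filter f) {
        filters.add(f);
    }

    /** Returns true when an in-place filter ran, i.e. the frame should be sent to the output surface. */
    boolean dispatch(byte[] frame, int width, int height) {
        boolean feedOutput = false;
        for (Filter f : filters) {
            if (f.isFrameListener()) {
                // Don't block the camera callback: hand a private copy to the worker.
                final byte[] copy = frame.clone();
                listenerWorker.submit(() -> f.process(copy, width, height));
            } else {
                // In-place filter: modifies the buffer, so the result must be fed to the surface.
                f.process(frame, width, height);
                feedOutput = true;
            }
        }
        return feedOutput;
    }

    void shutdown() {
        listenerWorker.shutdown();
    }
}
```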
diff --git a/res/values/camera2arrays.xml b/res/values/camera2arrays.xml index 089838405..c991dbf92 100644 --- a/res/values/camera2arrays.xml +++ b/res/values/camera2arrays.xml @@ -70,6 +70,16 @@ <item>off</item> </string-array> + <string-array name="pref_camera2_trackingfocus_entries" translatable="true"> + <item>On</item> + <item>Off</item> + </string-array> + + <string-array name="pref_camera2_trackingfocus_entryvalues" translatable="false"> + <item>on</item> + <item>off</item> + </string-array> + <string-array name="pref_camera2_mono_preview_entries" translatable="true"> <item>@string/pref_camera2_mono_preview_entry_on</item> <item>@string/pref_camera2_mono_preview_entry_off</item> diff --git a/res/values/qcomstrings.xml b/res/values/qcomstrings.xml index 6d1ec7862..e8917f3ee 100644 --- a/res/values/qcomstrings.xml +++ b/res/values/qcomstrings.xml @@ -1003,5 +1003,6 @@ <string name="pref_camera2_videosnap_entry_enable" translatable="true">Enable</string> <string name="pref_camera2_videosnap_entry_disable" translatable="true">Disable</string> + <string name="pref_camera2_trackingfocus_title" translatable="true">Tracking Focus</string> </resources> diff --git a/res/values/strings.xml b/res/values/strings.xml index 898329a7f..775aeb60d 100644 --- a/res/values/strings.xml +++ b/res/values/strings.xml @@ -748,4 +748,5 @@ CHAR LIMIT = NONE] --> <string name="remaining_photos_format">%d left</string> <string name="initial_recording_seconds">00:00</string> + <string name="makeup_video_size_limit">Makeup works only under VGA size in video recording.</string> </resources> diff --git a/res/xml/capture_preferences.xml b/res/xml/capture_preferences.xml index d7d478d0a..c5aab1a7d 100644 --- a/res/xml/capture_preferences.xml +++ b/res/xml/capture_preferences.xml @@ -273,4 +273,10 @@ camera:entryValues="@array/pref_camera2_videosnap_entryvalues" camera:key="pref_camera2_videosnap_key" camera:title="@string/pref_camera2_videosnap_title" /> + <ListPreference + camera:defaultValue="off" + camera:key="pref_camera2_trackingfocus_key" + camera:entries="@array/pref_camera2_trackingfocus_entries" + camera:entryValues="@array/pref_camera2_trackingfocus_entryvalues" + camera:title="@string/pref_camera2_trackingfocus_title"/> </PreferenceGroup> diff --git a/src/com/android/camera/CaptureModule.java b/src/com/android/camera/CaptureModule.java index a7448aedb..078707a7f 100644 --- a/src/com/android/camera/CaptureModule.java +++ b/src/com/android/camera/CaptureModule.java @@ -68,6 +68,7 @@ import android.view.KeyEvent; import android.view.OrientationEventListener; import android.view.Surface; import android.view.SurfaceHolder; +import android.view.SurfaceView; import android.view.View; import android.widget.Toast; @@ -80,6 +81,7 @@ import com.android.camera.imageprocessor.filter.SharpshooterFilter; import com.android.camera.ui.CountDownView; import com.android.camera.ui.ModuleSwitcher; import com.android.camera.ui.RotateTextToast; +import com.android.camera.ui.TrackingFocusRenderer; import com.android.camera.util.CameraUtil; import com.android.camera.util.PersistUtil; import com.android.camera.util.SettingTranslation; @@ -216,7 +218,6 @@ public class CaptureModule implements CameraModule, PhotoController, */ private PostProcessor mPostProcessor; private FrameProcessor mFrameProcessor; - private Size mFrameProcPreviewOutputSize; private CaptureResult mPreviewCaptureResult; private Face[] mPreviewFaces = null; private Face[] mStickyFaces = null; @@ -240,7 +241,6 @@ public class CaptureModule implements CameraModule, 
PhotoController, private CameraCaptureSession mCurrentSession; private Size mPreviewSize; private Size mPictureSize; - private Size mVideoPreviewSize; private Size mVideoSize; private Size mVideoSnapshotSize; @@ -682,7 +682,9 @@ public class CaptureModule implements CameraModule, PhotoController, } // When the session is ready, we start displaying the preview. mCaptureSession[id] = cameraCaptureSession; - mCurrentSession = cameraCaptureSession; + if(id == getMainCameraId()) { + mCurrentSession = cameraCaptureSession; + } initializePreviewConfiguration(id); setDisplayOrientation(); updateFaceDetection(); @@ -725,6 +727,11 @@ public class CaptureModule implements CameraModule, PhotoController, } }; + if(id == getMainCameraId()) { + mFrameProcessor.init(mPreviewSize); + mFrameProcessor.setOutputSurface(surface); + } + if(isClearSightOn()) { mPreviewRequestBuilder[id].addTarget(surface); list.add(surface); @@ -732,16 +739,17 @@ public class CaptureModule implements CameraModule, PhotoController, id == BAYER_ID, mCameraDevice[id], list, captureSessionCallback); } else if (id == getMainCameraId()) { if(mFrameProcessor.isFrameFilterEnabled()) { - mFrameProcessor.init(mFrameProcPreviewOutputSize); mActivity.runOnUiThread(new Runnable() { public void run() { - mUI.getSurfaceHolder().setFixedSize(mFrameProcPreviewOutputSize.getHeight(), mFrameProcPreviewOutputSize.getWidth()); + mUI.getSurfaceHolder().setFixedSize(mPreviewSize.getHeight(), mPreviewSize.getWidth()); } }); } - mFrameProcessor.setOutputSurface(surface); - mPreviewRequestBuilder[id].addTarget(mFrameProcessor.getInputSurface()); - list.add(mFrameProcessor.getInputSurface()); + List<Surface> surfaces = mFrameProcessor.getInputSurfaces(); + for(Surface surs : surfaces) { + mPreviewRequestBuilder[id].addTarget(surs); + list.add(surs); + } list.add(mImageReader[id].getSurface()); mCameraDevice[id].createCaptureSession(list, captureSessionCallback, null); } else { @@ -856,7 +864,7 @@ public class CaptureModule implements CameraModule, PhotoController, CaptureRequest.Builder builder = mCameraDevice[id].createCaptureRequest(CameraDevice .TEMPLATE_PREVIEW); builder.setTag(id); - builder.addTarget(getPreviewSurface(id)); + addPreviewSurface(builder, null, id); applySettingsForLockFocus(builder, id); CaptureRequest request = builder.build(); @@ -878,7 +886,7 @@ public class CaptureModule implements CameraModule, PhotoController, CaptureRequest.Builder builder = mCameraDevice[id].createCaptureRequest(CameraDevice .TEMPLATE_PREVIEW); builder.setTag(id); - builder.addTarget(getPreviewSurface(id)); + addPreviewSurface(builder, null, id); mControlAFMode = CaptureRequest.CONTROL_AF_MODE_AUTO; applySettingsForAutoFocus(builder, id); @@ -934,7 +942,7 @@ public class CaptureModule implements CameraModule, PhotoController, captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, CameraUtil.getJpegRotation(id, mOrientation)); captureBuilder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO); - captureBuilder.addTarget(getPreviewSurface(id)); + addPreviewSurface(captureBuilder, null, id); captureBuilder.set(CaptureRequest.CONTROL_AF_MODE, mControlAFMode); captureBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER, CaptureRequest.CONTROL_AF_TRIGGER_IDLE); applySettingsForCapture(captureBuilder, id); @@ -1106,7 +1114,7 @@ public class CaptureModule implements CameraModule, PhotoController, CaptureRequest.Builder builder = mCameraDevice[id].createCaptureRequest(CameraDevice .TEMPLATE_PREVIEW); builder.setTag(id); - builder.addTarget(getPreviewSurface(id)); 
+ addPreviewSurface(builder, null, id); applySettingsForPrecapture(builder, id); CaptureRequest request = builder.build(); mPrecaptureRequestHashCode[id] = request.hashCode(); @@ -1117,36 +1125,6 @@ public class CaptureModule implements CameraModule, PhotoController, } } - private void determineFrameProcPreviewOutputSize(List<Size> sizeList, float targetRatio) { - Display display = mActivity.getWindowManager().getDefaultDisplay(); - Point ds = new Point(); - display.getSize(ds); - int i=0, j=0, width, height; - float ratio; - for(; i < sizeList.size(); i++) { - width = sizeList.get(i).getHeight(); - height = sizeList.get(i).getWidth(); - ratio = (float)height/width; - if(ds.x >= width || ds.y >= height) { - if(j == 0) { - j = i; - } - if(ratio < targetRatio + 0.2f && ratio > targetRatio - 0.2f) { - break; - } - } - } - if(i == sizeList.size()) { - if(j != 0) { - mFrameProcPreviewOutputSize = sizeList.get(j); - } else { - mFrameProcPreviewOutputSize = sizeList.get(sizeList.size()-1); - } - } else { - mFrameProcPreviewOutputSize = sizeList.get(i); - } - } - public CameraCharacteristics getMainCameraCharacteristics() { return mMainCameraCharacteristics; } @@ -1180,19 +1158,6 @@ public class CaptureModule implements CameraModule, PhotoController, } mCameraId[i] = cameraId; - if (i == getMainCameraId()) { - String pictureSize = mSettingsManager.getValue(SettingsManager - .KEY_PICTURE_SIZE); - - Size size = parsePictureSize(pictureSize); - - Point screenSize = new Point(); - mActivity.getWindowManager().getDefaultDisplay().getSize(screenSize); - Size[] prevSizes = map.getOutputSizes(imageFormat); - mFrameProcPreviewOutputSize = getOptimalPreviewSize(size, prevSizes, screenSize.x, - screenSize.y); - } - if (isClearSightOn()) { if(i == getMainCameraId()) { ClearSightImageProcessor.getInstance().init(mPictureSize.getWidth(), @@ -1205,10 +1170,6 @@ public class CaptureModule implements CameraModule, PhotoController, mPictureSize.getHeight(), imageFormat, MAX_IMAGE_NUM); if(mPostProcessor.isFilterOn() && i == getMainCameraId()) { mImageReader[i].setOnImageAvailableListener(mPostProcessor, mImageAvailableHandler); -// if(mFrameProcessor.isFrameFilterEnabled()) { -// determineFrameProcPreviewOutputSize(Arrays.asList(map.getOutputSizes(imageFormat)), -// (float) size.getWidth() / (float) size.getHeight()); -// } } else { mImageReader[i].setOnImageAvailableListener(new ImageAvailableListener(i) { @Override @@ -1283,7 +1244,7 @@ public class CaptureModule implements CameraModule, PhotoController, CaptureRequest.Builder builder = mCameraDevice[id].createCaptureRequest(CameraDevice .TEMPLATE_PREVIEW); builder.setTag(id); - builder.addTarget(getPreviewSurface(id)); + addPreviewSurface(builder, null, id); applySettingsForUnlockFocus(builder, id); mCaptureSession[id].capture(builder.build(), mCaptureCallback, mCameraHandler); @@ -1548,11 +1509,16 @@ public class CaptureModule implements CameraModule, PhotoController, } private ArrayList<Integer> getFrameProcFilterId() { - String scene = mSettingsManager.getValue(SettingsManager.KEY_MAKEUP); ArrayList<Integer> filters = new ArrayList<Integer>(); + + String scene = mSettingsManager.getValue(SettingsManager.KEY_MAKEUP); if(scene != null && scene.equalsIgnoreCase("on")) { filters.add(FrameProcessor.FILTER_MAKEUP); } + String trackingFocus = mSettingsManager.getValue(SettingsManager.KEY_TRACKINGFOCUS); + if(trackingFocus != null && trackingFocus.equalsIgnoreCase("on")) { + filters.add(FrameProcessor.LISTENER_TRACKING_FOCUS); + } return filters; } @@ -1999,11 
+1965,6 @@ public class CaptureModule implements CameraModule, PhotoController, private void updateVideoSize() { String videoSize = mSettingsManager.getValue(SettingsManager.KEY_VIDEO_QUALITY); mVideoSize = parsePictureSize(videoSize); - Point screenSize = new Point(); - mActivity.getWindowManager().getDefaultDisplay().getSize(screenSize); - Size[] prevSizes = mSettingsManager.getSupportedOutputSize(getMainCameraId(), - MediaRecorder.class); - mVideoPreviewSize = getOptimalPreviewSize(mVideoSize, prevSizes, screenSize.x, screenSize.y); } private void updateVideoSnapshotSize() { @@ -2041,8 +2002,8 @@ public class CaptureModule implements CameraModule, PhotoController, mState[cameraId] = STATE_PREVIEW; mControlAFMode = CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE; closePreviewSession(); - boolean changed = mUI.setPreviewSize(mVideoPreviewSize.getWidth(), mVideoPreviewSize - .getHeight()); + mFrameProcessor.onClose(); + boolean changed = mUI.setPreviewSize(mVideoSize.getWidth(), mVideoSize.getHeight()); if (changed) { mUI.hideSurfaceView(); mUI.showSurfaceView(); @@ -2055,11 +2016,18 @@ public class CaptureModule implements CameraModule, PhotoController, .createCaptureRequest(CameraDevice.TEMPLATE_RECORD); List<Surface> surfaces = new ArrayList<>(); - Surface previewSurface = getPreviewSurface(cameraId); - surfaces.add(previewSurface); - mPreviewBuilder.addTarget(previewSurface); - surfaces.add(mMediaRecorder.getSurface()); - mPreviewBuilder.addTarget(mMediaRecorder.getSurface()); + Surface surface = getPreviewSurfaceForSession(cameraId); + mFrameProcessor.init(mVideoSize); + if(mFrameProcessor.isFrameFilterEnabled()) { + mActivity.runOnUiThread(new Runnable() { + public void run() { + mUI.getSurfaceHolder().setFixedSize(mVideoSize.getHeight(), mVideoSize.getWidth()); + } + }); + } + mFrameProcessor.setOutputSurface(surface); + mFrameProcessor.setVideoOutputSurface(mMediaRecorder.getSurface()); + addPreviewSurface(mPreviewBuilder, surfaces, cameraId); surfaces.add(mVideoSnapshotImageReader.getSurface()); mCameraDevice[cameraId].createCaptureSession(surfaces, new CameraCaptureSession @@ -2235,12 +2203,13 @@ public class CaptureModule implements CameraModule, PhotoController, Log.d(TAG, "stopRecordingVideo " + cameraId); // Stop recording + mFrameProcessor.onClose(); + mFrameProcessor.setVideoOutputSurface(null); closePreviewSession(); mMediaRecorder.stop(); mMediaRecorder.reset(); saveVideo(); - mUI.showRecordingUI(false); mIsRecordingVideo = false; boolean changed = mUI.setPreviewSize(mPreviewSize.getWidth(), mPreviewSize.getHeight()); @@ -2348,9 +2317,13 @@ public class CaptureModule implements CameraModule, PhotoController, String fileName = generateVideoFilename(outputFormat); Log.v(TAG, "New video filename: " + fileName); mMediaRecorder.setOutputFile(fileName); - mMediaRecorder.setVideoEncodingBitRate(mProfile.videoBitRate); mMediaRecorder.setVideoFrameRate(mProfile.videoFrameRate); - mMediaRecorder.setVideoSize(mProfile.videoFrameWidth, mProfile.videoFrameHeight); + mMediaRecorder.setVideoEncodingBitRate(mProfile.videoBitRate); + if(mFrameProcessor.isFrameFilterEnabled()) { + mMediaRecorder.setVideoSize(mProfile.videoFrameHeight, mProfile.videoFrameWidth); + } else { + mMediaRecorder.setVideoSize(mProfile.videoFrameWidth, mProfile.videoFrameHeight); + } mMediaRecorder.setVideoEncoder(videoEncoder); if (!mCaptureTimeLapse) { mMediaRecorder.setAudioEncodingBitRate(mProfile.audioBitRate); @@ -2374,7 +2347,11 @@ public class CaptureModule implements CameraModule, PhotoController, 
rotation += Integer.parseInt(videoRotation); rotation = rotation % 360; } - mMediaRecorder.setOrientationHint(rotation); + if(mFrameProcessor.isFrameFilterEnabled()) { + mMediaRecorder.setOrientationHint(0); + } else { + mMediaRecorder.setOrientationHint(rotation); + } mMediaRecorder.prepare(); } @@ -2708,15 +2685,22 @@ public class CaptureModule implements CameraModule, PhotoController, } } - private Surface getPreviewSurface(int id) { - if (isBackCamera()) { - if (getCameraMode() == DUAL_MODE && id == MONO_ID) { - return mUI.getSurfaceHolder2().getSurface(); - } else { - return mFrameProcessor.getInputSurface(); + private void addPreviewSurface(CaptureRequest.Builder builder, List<Surface> surfaceList, int id) { + if (isBackCamera() && getCameraMode() == DUAL_MODE && id == MONO_ID) { + if(surfaceList != null) { + surfaceList.add(mUI.getSurfaceHolder2().getSurface()); } + builder.addTarget(mUI.getSurfaceHolder2().getSurface()); + return; } else { - return mFrameProcessor.getInputSurface(); + List<Surface> surfaces = mFrameProcessor.getInputSurfaces(); + for(Surface surface : surfaces) { + if(surfaceList != null) { + surfaceList.add(surface); + } + builder.addTarget(surface); + } + return; } } @@ -2811,6 +2795,23 @@ public class CaptureModule implements CameraModule, PhotoController, mDisplayOrientation = CameraUtil.getDisplayOrientation(mDisplayRotation, getMainCameraId()); } + private void checkVideoSizeDependency() { + String makeup = mSettingsManager.getValue(SettingsManager.KEY_MAKEUP); + if(makeup.equalsIgnoreCase("on")) { + if(mVideoSize.getWidth() > 640 || mVideoSize.getHeight() > 480) { + mActivity.runOnUiThread(new Runnable() { + public void run() { + RotateTextToast.makeText(mActivity, R.string.makeup_video_size_limit, Toast.LENGTH_SHORT).show(); + } + }); + mSettingsManager.setValue(mSettingsManager.KEY_VIDEO_QUALITY, "640x480"); + } + mSettingsManager.updateVideoQualityMenu(getMainCameraId(), 640, 480); + } else { + mSettingsManager.updateVideoQualityMenu(getMainCameraId(), -1, -1); + } + } + @Override public void onSettingsChanged(List<SettingsManager.SettingState> settings) { if (mPaused) return; @@ -2864,7 +2865,11 @@ public class CaptureModule implements CameraModule, PhotoController, if (count == 0) restart(); return; case SettingsManager.KEY_MAKEUP: - restart(); + if (count == 0) restart(); + checkVideoSizeDependency(); + return; + case SettingsManager.KEY_TRACKINGFOCUS: + if (count == 0) restart(); return; case SettingsManager.KEY_SCENE_MODE: if (count == 0 && checkNeedToRestart(value)) { @@ -2938,8 +2943,9 @@ public class CaptureModule implements CameraModule, PhotoController, private boolean checkNeedToRestart(String value) { mPostProcessor.setFilter(PostProcessor.FILTER_NONE); int mode = Integer.parseInt(value); - if (getPostProcFilterId(mode) != PostProcessor.FILTER_NONE) + if (getPostProcFilterId(mode) != PostProcessor.FILTER_NONE) { return true; + } if (value.equals(SettingsManager.SCENE_MODE_DUAL_STRING) && mCurrentMode != DUAL_MODE) return true; if (!value.equals(SettingsManager.SCENE_MODE_DUAL_STRING) && mCurrentMode == DUAL_MODE) @@ -2987,7 +2993,7 @@ public class CaptureModule implements CameraModule, PhotoController, private Size getMaxSizeWithRatio(Size[] sizes, Size reference) { float ratio = (float) reference.getWidth() / reference.getHeight(); - for (Size size: sizes) { + for (Size size : sizes) { float prevRatio = (float) size.getWidth() / size.getHeight(); if (Math.abs(prevRatio - ratio) < 0.01) { return size; @@ -2996,6 +3002,10 @@ public class 
CaptureModule implements CameraModule, PhotoController, return sizes[0]; } + public TrackingFocusRenderer getTrackingForcusRenderer() { + return mUI.getTrackingFocusRenderer(); + } + /** * Compares two {@code Size}s based on their areas. */ diff --git a/src/com/android/camera/CaptureUI.java b/src/com/android/camera/CaptureUI.java index f5f23fa18..cefc77651 100644 --- a/src/com/android/camera/CaptureUI.java +++ b/src/com/android/camera/CaptureUI.java @@ -55,6 +55,7 @@ import com.android.camera.ui.RenderOverlay; import com.android.camera.ui.RotateImageView; import com.android.camera.ui.RotateLayout; import com.android.camera.ui.RotateTextToast; +import com.android.camera.ui.TrackingFocusRenderer; import com.android.camera.ui.ZoomRenderer; import com.android.camera.util.CameraUtil; @@ -100,7 +101,8 @@ public class CaptureUI implements FocusOverlayManager.FocusUI, SettingsManager.KEY_FACE_DETECTION, SettingsManager.KEY_VIDEO_FLASH_MODE, SettingsManager.KEY_VIDEO_DURATION, - SettingsManager.KEY_VIDEO_QUALITY + SettingsManager.KEY_VIDEO_QUALITY, + SettingsManager.KEY_TRACKINGFOCUS }; String[] mDeveloperKeys = new String[]{ SettingsManager.KEY_REDEYE_REDUCTION, @@ -135,9 +137,10 @@ public class CaptureUI implements FocusOverlayManager.FocusUI, private PreviewGestures mGestures; private boolean mUIhidden = false; private SettingsManager mSettingsManager; - + private TrackingFocusRenderer mTrackingFocusRenderer; private ImageView mThumbnail; private Camera2FaceView mFaceView; + private Point mDisplaySize = new Point(); private SurfaceHolder.Callback callback = new SurfaceHolder.Callback() { @@ -152,6 +155,9 @@ public class CaptureUI implements FocusOverlayManager.FocusUI, Log.v(TAG, "surfaceCreated"); mSurfaceHolder = holder; previewUIReady(); + if(mTrackingFocusRenderer != null && mTrackingFocusRenderer.isVisible()) { + mTrackingFocusRenderer.setSurfaceDim(mSurfaceView.getLeft(), mSurfaceView.getTop(), mSurfaceView.getRight(), mSurfaceView.getBottom()); + } } @Override @@ -229,6 +235,14 @@ public class CaptureUI implements FocusOverlayManager.FocusUI, mModule.onPreviewUIDestroyed(); } + public TrackingFocusRenderer getTrackingFocusRenderer() { + return mTrackingFocusRenderer; + } + + public Point getDisplaySize() { + return mDisplaySize; + } + public CaptureUI(CameraActivity activity, CaptureModule module, View parent) { mActivity = activity; mModule = module; @@ -269,6 +283,15 @@ public class CaptureUI implements FocusOverlayManager.FocusUI, initSceneModeButton(); initSwitchCamera(); + mTrackingFocusRenderer = new TrackingFocusRenderer(mActivity, mModule, this); + mRenderOverlay.addRenderer(mTrackingFocusRenderer); + String trackingFocus = mSettingsManager.getValue(SettingsManager.KEY_TRACKINGFOCUS); + if(trackingFocus != null && trackingFocus.equalsIgnoreCase("on")) { + mTrackingFocusRenderer.setVisible(true); + } else { + mTrackingFocusRenderer.setVisible(false); + } + mSwitcher = (ModuleSwitcher) mRootView.findViewById(R.id.camera_switcher); mSwitcher.setCurrentIndex(ModuleSwitcher.PHOTO_MODULE_INDEX); mSwitcher.setSwitchListener(mActivity); @@ -316,11 +339,10 @@ public class CaptureUI implements FocusOverlayManager.FocusUI, mCameraControls = (CameraControls) mRootView.findViewById(R.id.camera_controls); mFaceView = (Camera2FaceView) mRootView.findViewById(R.id.face_view); - Point size = new Point(); - mActivity.getWindowManager().getDefaultDisplay().getSize(size); - mScreenRatio = CameraUtil.determineRatio(size.x, size.y); + 
mActivity.getWindowManager().getDefaultDisplay().getSize(mDisplaySize); + mScreenRatio = CameraUtil.determineRatio(mDisplaySize.x, mDisplaySize.y); if (mScreenRatio == CameraUtil.RATIO_16_9) { - int l = size.x > size.y ? size.x : size.y; + int l = mDisplaySize.x > mDisplaySize.y ? mDisplaySize.x : mDisplaySize.y; int tm = mActivity.getResources().getDimensionPixelSize(R.dimen.preview_top_margin); int bm = mActivity.getResources().getDimensionPixelSize(R.dimen.preview_bottom_margin); mTopMargin = l / 4 * tm / (tm + bm); @@ -366,6 +388,9 @@ public class CaptureUI implements FocusOverlayManager.FocusUI, initializeSettingMenu(); initSceneModeButton(); initFilterModeButton(); + if(mTrackingFocusRenderer != null) { + mTrackingFocusRenderer.setVisible(true); + } } // called from onResume but only the first time @@ -1319,6 +1344,9 @@ public class CaptureUI implements FocusOverlayManager.FocusUI, cancelCountDown(); collapseCameraControls(); if (mFaceView != null) mFaceView.clear(); + if(mTrackingFocusRenderer != null) { + mTrackingFocusRenderer.setVisible(false); + } } public boolean collapseCameraControls() { @@ -1331,6 +1359,14 @@ public class CaptureUI implements FocusOverlayManager.FocusUI, } private FocusIndicator getFocusIndicator() { + String trackingFocus = mSettingsManager.getValue(SettingsManager.KEY_TRACKINGFOCUS); + if (trackingFocus != null && trackingFocus.equalsIgnoreCase("on")) { + if (mPieRenderer != null) { + mPieRenderer.clear(); + } + return mTrackingFocusRenderer; + } + return (mFaceView != null && mFaceView.faceExists()) ? mFaceView : mPieRenderer; } diff --git a/src/com/android/camera/SettingsManager.java b/src/com/android/camera/SettingsManager.java index 37976f190..b5ac8b3ee 100644 --- a/src/com/android/camera/SettingsManager.java +++ b/src/com/android/camera/SettingsManager.java @@ -79,6 +79,7 @@ public class SettingsManager implements ListMenu.SettingsListener { public static final String KEY_FLASH_MODE = "pref_camera2_flashmode_key"; public static final String KEY_WHITE_BALANCE = "pref_camera2_whitebalance_key"; public static final String KEY_MAKEUP = "pref_camera2_makeup_key"; + public static final String KEY_TRACKINGFOCUS = "pref_camera2_trackingfocus_key"; public static final String KEY_CAMERA2 = "pref_camera2_camera2_key"; public static final String KEY_MONO_ONLY = "pref_camera2_mono_only_key"; public static final String KEY_MONO_PREVIEW = "pref_camera2_mono_preview_key"; @@ -219,6 +220,17 @@ public class SettingsManager implements ListMenu.SettingsListener { filterPreferences(cameraId); initDepedencyTable(); initializeValueMap(); + checkInitialDependency(cameraId); + } + + private void checkInitialDependency(int cameraId) { + ListPreference videoQuality = mPreferenceGroup.findPreference(KEY_VIDEO_QUALITY); + if (videoQuality != null) { + String scene = getValue(SettingsManager.KEY_MAKEUP); + if(scene != null && scene.equalsIgnoreCase("on")) { + updateVideoQualityMenu(cameraId, 640, 480); + } + } } private void initDepedencyTable() { @@ -447,6 +459,20 @@ public class SettingsManager implements ListMenu.SettingsListener { else return CaptureModule.MONO_ID; } + public void updateVideoQualityMenu(int cameraId, int maxWidth, int maxHeight) { + ListPreference videoQuality = mPreferenceGroup.findPreference(KEY_VIDEO_QUALITY); + if (videoQuality != null) { + List<String> sizes; + if(maxWidth < 0 && maxHeight < 0) { + sizes = getSupportedVideoSize(cameraId); + } else { + sizes = getSupportedVideoSize(cameraId, maxWidth, maxHeight); + } + 
CameraSettings.filterUnsupportedOptions(mPreferenceGroup, + videoQuality, sizes); + } + } + private void filterPreferences(int cameraId) { // filter unsupported preferences ListPreference whiteBalance = mPreferenceGroup.findPreference(KEY_WHITE_BALANCE); @@ -502,7 +528,7 @@ public class SettingsManager implements ListMenu.SettingsListener { iso, getSupportedIso(cameraId)); } - if (iso != null) { + if (videoQuality != null) { CameraSettings.filterUnsupportedOptions(mPreferenceGroup, videoQuality, getSupportedVideoSize(cameraId)); } @@ -754,6 +780,19 @@ public class SettingsManager implements ListMenu.SettingsListener { return res; } + private List<String> getSupportedVideoSize(int cameraId, int maxWidth, int maxHeight) { + StreamConfigurationMap map = mCharacteristics.get(cameraId).get( + CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP); + Size[] sizes = map.getOutputSizes(MediaRecorder.class); + List<String> res = new ArrayList<>(); + for (int i = 0; i < sizes.length; i++) { + if(sizes[i].getWidth() <= maxWidth && sizes[i].getHeight() <= maxHeight) { + res.add(sizes[i].toString()); + } + } + return res; + } + private List<String> getSupportedRedeyeReduction(int cameraId) { int[] flashModes = mCharacteristics.get(cameraId).get(CameraCharacteristics .CONTROL_AE_AVAILABLE_MODES); diff --git a/src/com/android/camera/imageprocessor/FrameProcessor.java b/src/com/android/camera/imageprocessor/FrameProcessor.java index 6a2091158..74db08b31 100644 --- a/src/com/android/camera/imageprocessor/FrameProcessor.java +++ b/src/com/android/camera/imageprocessor/FrameProcessor.java @@ -30,14 +30,7 @@ IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. package com.android.camera.imageprocessor; import android.app.Activity; -import android.content.Context; -import android.graphics.Bitmap; -import android.graphics.BitmapFactory; -import android.graphics.Canvas; import android.graphics.ImageFormat; -import android.graphics.Matrix; -import android.graphics.Rect; -import android.graphics.YuvImage; import android.media.Image; import android.media.ImageReader; import android.os.Handler; @@ -45,21 +38,23 @@ import android.os.HandlerThread; import android.renderscript.Allocation; import android.renderscript.Element; import android.renderscript.RenderScript; -import android.renderscript.ScriptIntrinsicYuvToRGB; import android.renderscript.Type; -import android.util.Log; import android.util.Size; import android.view.Surface; +import android.widget.Toast; import com.android.camera.CaptureModule; -import com.android.camera.PhotoModule; +import com.android.camera.SettingsManager; import com.android.camera.imageprocessor.filter.BeautificationFilter; import com.android.camera.imageprocessor.filter.ImageFilter; -import com.android.camera.util.CameraUtil; +import com.android.camera.imageprocessor.filter.TrackingFocusFrameListener; +import com.android.camera.ui.RotateTextToast; -import java.io.ByteArrayOutputStream; import java.nio.ByteBuffer; import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.Semaphore; +import org.codeaurora.snapcam.R; public class FrameProcessor { @@ -67,13 +62,17 @@ public class FrameProcessor { private Allocation mInputAllocation; private Allocation mProcessAllocation; private Allocation mOutputAllocation; + private Allocation mVideoOutputAllocation; private HandlerThread mProcessingThread; private Handler mProcessingHandler; private HandlerThread mOutingThread; private Handler mOutingHandler; + private HandlerThread mListeningThread; + private Handler mListeningHandler; - 
public ProcessingTask mTask; + private ProcessingTask mTask; + private ListeningTask mListeningTask; private RenderScript mRs; private Activity mActivity; ScriptC_YuvToRgb mRsYuvToRGB; @@ -84,10 +83,13 @@ public class FrameProcessor { private ArrayList<ImageFilter> mPreviewFilters; private ArrayList<ImageFilter> mFinalFilters; private Surface mSurfaceAsItIs; + private Surface mVideoSurfaceAsItIs; private boolean mIsActive = false; public static final int FILTER_NONE = 0; public static final int FILTER_MAKEUP = 1; + public static final int LISTENER_TRACKING_FOCUS = 2; private CaptureModule mModule; + private boolean mIsVideoOn = false; public FrameProcessor(Activity activity, CaptureModule module) { mActivity = activity; @@ -97,6 +99,7 @@ public class FrameProcessor { } public void init(Size previewDim) { + mIsActive = true; mSize = previewDim; synchronized (mAllocationLock) { mRs = RenderScript.create(mActivity); @@ -122,6 +125,13 @@ public class FrameProcessor { mOutingHandler = new Handler(mOutingThread.getLooper()); } + if(mListeningThread == null) { + mListeningThread = new HandlerThread("FrameListeningThread"); + mListeningThread.start(); + mListeningHandler = new Handler(mListeningThread.getLooper()); + } + + mListeningTask = new ListeningTask(); mTask = new ProcessingTask(); mInputImageReader.setOnImageAvailableListener(mTask, mProcessingHandler); mIsAllocationEverUsed = false; @@ -153,12 +163,12 @@ public class FrameProcessor { } private void cleanFilterSet() { - if(mPreviewFilters != null) { + if (mPreviewFilters != null) { for (ImageFilter filter : mPreviewFilters) { filter.deinit(); } } - if(mFinalFilters != null) { + if (mFinalFilters != null) { for (ImageFilter filter : mFinalFilters) { filter.deinit(); } @@ -168,25 +178,29 @@ public class FrameProcessor { } public void onOpen(ArrayList<Integer> filterIds) { - mIsActive = true; - synchronized (mAllocationLock) { - cleanFilterSet(); - if (filterIds != null) { - for (Integer i : filterIds) { - addFilter(i.intValue()); - } + cleanFilterSet(); + if (filterIds != null) { + for (Integer i : filterIds) { + addFilter(i.intValue()); } } } private void addFilter(int filterId) { - if(filterId == FILTER_MAKEUP) { - ImageFilter filter = new BeautificationFilter(mModule); - if(filter.isSupported()) { - mPreviewFilters.add(filter); + ImageFilter filter = null; + if (filterId == FILTER_MAKEUP) { + filter = new BeautificationFilter(mModule); + } else if (filterId == LISTENER_TRACKING_FOCUS) { + filter = new TrackingFocusFrameListener(mModule); + } + + if (filter != null && filter.isSupported()) { + mPreviewFilters.add(filter); + if (!filter.isFrameListener()) { mFinalFilters.add(filter); } } + } public void onClose() { @@ -202,6 +216,9 @@ public class FrameProcessor { if (mProcessAllocation != null) { mProcessAllocation.destroy(); } + if (mVideoOutputAllocation != null) { + mVideoOutputAllocation.destroy(); + } } if (mRs != null) { mRs.destroy(); @@ -210,6 +227,7 @@ public class FrameProcessor { mProcessAllocation = null; mOutputAllocation = null; mInputAllocation = null; + mVideoOutputAllocation = null; } if (mProcessingThread != null) { mProcessingThread.quitSafely(); @@ -229,6 +247,15 @@ public class FrameProcessor { } catch (InterruptedException e) { } } + if (mListeningThread != null) { + mListeningThread.quitSafely(); + try { + mListeningThread.join(); + mListeningThread = null; + mListeningHandler = null; + } catch (InterruptedException e) { + } + } for(ImageFilter filter : mPreviewFilters) { filter.deinit(); } @@ -237,32 +264,75 @@ 
public class FrameProcessor { } } - public Surface getInputSurface() { - if(mPreviewFilters.size() == 0) { - return mSurfaceAsItIs; - } + private Surface getReaderSurface() { synchronized (mAllocationLock) { - if (mInputImageReader == null) + if (mInputImageReader == null) { return null; + } return mInputImageReader.getSurface(); } } + public List<Surface> getInputSurfaces() { + List<Surface> surfaces = new ArrayList<Surface>(); + if(mPreviewFilters.size() == 0 && mFinalFilters.size() == 0) { + surfaces.add(mSurfaceAsItIs); + if(mIsVideoOn) { + surfaces.add(mVideoSurfaceAsItIs); + } + } else if (mFinalFilters.size() == 0) { + surfaces.add(mSurfaceAsItIs); + if(mIsVideoOn) { + surfaces.add(mVideoSurfaceAsItIs); + } + surfaces.add(getReaderSurface()); + } else { + surfaces.add(getReaderSurface()); + } + return surfaces; + } + public boolean isFrameFilterEnabled() { - if(mPreviewFilters.size() == 0) { + if(mFinalFilters.size() == 0) { return false; } return true; } public void setOutputSurface(Surface surface) { - if(mPreviewFilters.size() == 0) { - mSurfaceAsItIs = surface; - } else { + mSurfaceAsItIs = surface; + if(mFinalFilters.size() != 0) { mOutputAllocation.setSurface(surface); } } + public void setVideoOutputSurface(Surface surface) { + if(surface == null) { + synchronized (mAllocationLock) { + if (mVideoOutputAllocation != null) { + mVideoOutputAllocation.destroy(); + } + mVideoOutputAllocation = null; + } + mIsVideoOn = false; + return; + } + mVideoSurfaceAsItIs = surface; + mIsVideoOn = true; + if(mFinalFilters.size() != 0) { + synchronized (mAllocationLock) { + if (mVideoOutputAllocation == null) { + Type.Builder rgbTypeBuilder = new Type.Builder(mRs, Element.RGBA_8888(mRs)); + rgbTypeBuilder.setX(mSize.getHeight()); + rgbTypeBuilder.setY(mSize.getWidth()); + mVideoOutputAllocation = Allocation.createTyped(mRs, rgbTypeBuilder.create(), + Allocation.USAGE_SCRIPT | Allocation.USAGE_IO_OUTPUT); + } + mVideoOutputAllocation.setSurface(surface); + } + } + } + class ProcessingTask implements Runnable, ImageReader.OnImageAvailableListener { byte[] yvuBytes = null; int ySize; @@ -276,12 +346,14 @@ public class FrameProcessor { @Override public void onImageAvailable(ImageReader reader) { synchronized (mAllocationLock) { - if(mOutputAllocation == null) + if(mOutputAllocation == null) { return; + } try { Image image = reader.acquireLatestImage(); - if(image == null) + if(image == null) { return; + } if(!mIsActive) { image.close(); return; @@ -289,23 +361,35 @@ public class FrameProcessor { mIsAllocationEverUsed = true; ByteBuffer bY = image.getPlanes()[0].getBuffer(); ByteBuffer bVU = image.getPlanes()[2].getBuffer(); - if(yvuBytes == null) { + if(yvuBytes == null || width != mSize.getWidth() || height != mSize.getHeight()) { stride = image.getPlanes()[0].getRowStride(); width = mSize.getWidth(); height = mSize.getHeight(); ySize = stride * mSize.getHeight(); yvuBytes = new byte[ySize*3/2]; } + boolean needToFeedSurface = false; //Start processing yvu buf for (ImageFilter filter : mPreviewFilters) { - filter.init(mSize.getWidth(), mSize.getHeight(), stride, stride); - filter.addImage(bY, bVU, 0, new Boolean(true)); + if(filter.isFrameListener()) { + if (mListeningTask.setParam(filter, bY, bVU, mSize.getWidth(), mSize.getHeight(), stride)) { + mListeningHandler.post(mListeningTask); + } + } else { + filter.init(mSize.getWidth(), mSize.getHeight(), stride, stride); + filter.addImage(bY, bVU, 0, new Boolean(true)); + needToFeedSurface = true; + } + bY.rewind(); + bVU.rewind(); } //End 
processing yvu buf - bY.get(yvuBytes, 0, bY.remaining()); - bVU.get(yvuBytes, ySize, bVU.remaining()); + if(needToFeedSurface) { + bY.get(yvuBytes, 0, bY.remaining()); + bVU.get(yvuBytes, ySize, bVU.remaining()); + mOutingHandler.post(this); + } image.close(); - mOutingHandler.post(this); } catch (IllegalStateException e) { } } @@ -318,12 +402,68 @@ public class FrameProcessor { return; } if(mInputAllocation == null) { - createAllocation(stride, height, stride-width); + createAllocation(stride, height, stride - width); } mInputAllocation.copyFrom(yvuBytes); mRsRotator.forEach_rotate90andMerge(mInputAllocation); mRsYuvToRGB.forEach_nv21ToRgb(mOutputAllocation); mOutputAllocation.ioSend(); + if(mVideoOutputAllocation != null) { + mVideoOutputAllocation.copyFrom(mOutputAllocation); + mVideoOutputAllocation.ioSend(); + } + } + } + } + + class ListeningTask implements Runnable { + + ImageFilter mFilter; + ByteBuffer mBY = null, mBVU = null; + int mWidth, mHeight, mStride; + int bYSize, bVUSize; + Semaphore mMutureLock = new Semaphore(1); + + public boolean setParam(ImageFilter filter, ByteBuffer bY, ByteBuffer bVU, int width, int height, int stride) { + if(!mIsActive) { + return false; + } + if (!mMutureLock.tryAcquire()) { + return false; + } + mFilter = filter; + if (mBY == null || bYSize != bY.remaining()) { + bYSize = bY.remaining(); + mBY = ByteBuffer.allocateDirect(bYSize); + } + if (mBVU == null || bVUSize != bVU.remaining()) { + bVUSize = bVU.remaining(); + mBVU = ByteBuffer.allocateDirect(bVUSize); + } + mBY.rewind(); + mBVU.rewind(); + mBY.put(bY); + mBVU.put(bVU); + mWidth = width; + mHeight = height; + mStride = stride; + mMutureLock.release(); + return true; + } + + @Override + public void run() { + try { + if (!mIsActive) { + return; + } + mMutureLock.acquire(); + mBY.rewind(); + mBVU.rewind(); + mFilter.init(mWidth, mHeight, mStride, mStride); + mFilter.addImage(mBY, mBVU, 0, new Boolean(true)); + mMutureLock.release(); + } catch (InterruptedException e) { } } } diff --git a/src/com/android/camera/imageprocessor/filter/BeautificationFilter.java b/src/com/android/camera/imageprocessor/filter/BeautificationFilter.java index 6ec9376d0..7a13895f4 100644 --- a/src/com/android/camera/imageprocessor/filter/BeautificationFilter.java +++ b/src/com/android/camera/imageprocessor/filter/BeautificationFilter.java @@ -120,6 +120,11 @@ public class BeautificationFilter implements ImageFilter { return mIsSupported; } + @Override + public boolean isFrameListener() { + return false; + } + private native int nativeBeautificationProcess(ByteBuffer yB, ByteBuffer vuB, int width, int height, int stride, int fleft, int ftop, int fright, int fbottom); diff --git a/src/com/android/camera/imageprocessor/filter/ImageFilter.java b/src/com/android/camera/imageprocessor/filter/ImageFilter.java index e62d9b30a..b778dee03 100644 --- a/src/com/android/camera/imageprocessor/filter/ImageFilter.java +++ b/src/com/android/camera/imageprocessor/filter/ImageFilter.java @@ -71,4 +71,6 @@ public interface ImageFilter { this.stride = stride; } } + + boolean isFrameListener(); } diff --git a/src/com/android/camera/imageprocessor/filter/OptizoomFilter.java b/src/com/android/camera/imageprocessor/filter/OptizoomFilter.java index 9b5af29dc..16b8ae163 100644 --- a/src/com/android/camera/imageprocessor/filter/OptizoomFilter.java +++ b/src/com/android/camera/imageprocessor/filter/OptizoomFilter.java @@ -127,6 +127,11 @@ public class OptizoomFilter implements ImageFilter{ return mIsSupported; } + @Override + public boolean 
isFrameListener() { + return false; + } + public static boolean isSupportedStatic() { return mIsSupported; } diff --git a/src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java b/src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java index 74469afc3..e123f3439 100644 --- a/src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java +++ b/src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java @@ -150,6 +150,11 @@ public class SharpshooterFilter implements ImageFilter{ return mIsSupported; } + @Override + public boolean isFrameListener() { + return false; + } + public static boolean isSupportedStatic() { return mIsSupported; } diff --git a/src/com/android/camera/imageprocessor/filter/TrackingFocusFrameListener.java b/src/com/android/camera/imageprocessor/filter/TrackingFocusFrameListener.java new file mode 100644 index 000000000..a12eef91a --- /dev/null +++ b/src/com/android/camera/imageprocessor/filter/TrackingFocusFrameListener.java @@ -0,0 +1,276 @@ +/* +Copyright (c) 2016, The Linux Foundation. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of The Linux Foundation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.android.camera.imageprocessor.filter; + +import android.graphics.Point; +import android.graphics.Rect; +import android.hardware.Camera; +import android.hardware.camera2.CaptureRequest; +import android.hardware.camera2.params.Face; +import android.util.Log; +import android.util.Size; + +import com.android.camera.CaptureModule; +import com.android.camera.ui.FilmstripBottomControls; +import com.android.camera.ui.TrackingFocusRenderer; + +import java.nio.ByteBuffer; +import java.util.HashSet; +import java.util.List; + +public class TrackingFocusFrameListener implements ImageFilter { + + int mWidth; + int mHeight; + int mStrideY; + int mStrideVU; + private CaptureModule mModule; + private static boolean DEBUG = false; + private static String TAG = "TrackingFocusFrameListener"; + private static boolean mIsSupported = false; + private Rect imageRect; + public static final int PENDING_REGISTRATION = -1; + public static final int MAX_NUM_TRACKED_OBJECTS = 3; + private int mTrackedId = PENDING_REGISTRATION; + private boolean mIsInitialzed = false; + private TrackingFocusRenderer mTrackingFocusRender; + byte[] yvuBytes = null; + private int[] mInputCords = null; + private boolean mIsFirstTime = true; + + public enum OperationMode { + DEFAULT, + PERFORMANCE, + CPU_OFFLOAD, + LOW_POWER + } + + public enum Precision { + HIGH, + LOW + } + + public TrackingFocusFrameListener(CaptureModule module) { + mModule = module; + } + + @Override + public List<CaptureRequest> setRequiredImages(CaptureRequest.Builder builder) { + return null; + } + + @Override + public String getStringName() { + return "TrackingFocusFrameListener"; + } + + @Override + public int getNumRequiredImage() { + return 1; + } + + @Override + public void init(int width, int height, int strideY, int strideVU) { + mWidth = width; + mHeight = height; + mStrideY = strideY; + mStrideVU = strideVU; + if(!mIsInitialzed) { + if (nInit(OperationMode.PERFORMANCE.ordinal(), Precision.HIGH.ordinal(), mWidth, mHeight, mStrideY) < 0) { + Log.e(TAG, "Initialization failed."); + } + imageRect = new Rect(0, 0, width, height); + mTrackingFocusRender = mModule.getTrackingForcusRenderer(); + yvuBytes = new byte[mStrideY*mHeight*3/2]; + mIsInitialzed = true; + } + } + + @Override + public void deinit() { + if (mIsInitialzed) { + nRelease(); + mIsInitialzed = false; + } + } + + @Override + public void addImage(ByteBuffer bY, ByteBuffer bVU, int imageNum, Object isPreview) { + bY.get(yvuBytes, 0, bY.remaining()); + bVU.get(yvuBytes, mStrideY * mHeight, bVU.remaining()); + int[] cords = mTrackingFocusRender.getInputCords(mWidth, mHeight); + if(cords != null) { + if(mTrackedId != PENDING_REGISTRATION) { + unregisterObject(mTrackedId); + mTrackedId = PENDING_REGISTRATION; + } + mIsFirstTime = true; + mInputCords = cords; + } + if(mInputCords != null) { + if (mTrackedId == PENDING_REGISTRATION) { + try { + mTrackedId = registerObject(yvuBytes, new Point(mInputCords[0], mInputCords[1]), mIsFirstTime); + mIsFirstTime = false; + }catch(IllegalArgumentException e) { + mTrackedId = PENDING_REGISTRATION; + Log.e(TAG, e.toString()); + } + } + if(mTrackedId != PENDING_REGISTRATION) { + mTrackingFocusRender.putRegisteredCords(trackObjects(yvuBytes), mWidth, mHeight); + } + } + } + + public static class Result { + public final int id; + public final int confidence; + public Rect pos; + + private Result(int id, int confidence, int left, int top, int right, int bottom) { + this.id = id; + this.confidence = confidence; + this.pos = new Rect(left, top, right, 
bottom); + } + + public static Result Copy(Result old) { + Result result = new Result(old.id, old.confidence, old.pos.left, old.pos.top, old.pos.right, old.pos.bottom); + return result; + } + } + + public int getMinRoiDimension() { + if (!mIsInitialzed) { + throw new IllegalArgumentException("already released"); + } + + return nGetMinRoiDimension(); + } + + public int getMaxRoiDimension() { + if (!mIsInitialzed) { + throw new IllegalArgumentException("already released"); + } + + return nGetMaxRoiDimension(); + } + + public int registerObject(byte[] imageDataNV21, Rect rect) + { + if (imageDataNV21 == null || imageDataNV21.length < getMinFrameSize()) { + throw new IllegalArgumentException("imageDataNV21 null or too small to encode frame"); + } else if (rect == null || rect.isEmpty() || !imageRect.contains(rect)) { + throw new IllegalArgumentException("rect must be non-empty and be entirely inside " + + "the frame"); + } else if (!mIsInitialzed) { + throw new IllegalArgumentException("already released"); + } + int id = nRegisterObjectByRect(imageDataNV21, rect.left, rect.top, rect.right, rect.bottom); + if(id == 0) { + id = PENDING_REGISTRATION; + } + mTrackedId = id; + return mTrackedId; + } + + public int registerObject(byte[] imageDataNV21, Point point, boolean firstTime) + { + if (imageDataNV21 == null || imageDataNV21.length < getMinFrameSize()) { + throw new IllegalArgumentException("imageDataNV21 null or too small to encode frame" + + imageDataNV21.length+ " "+getMinFrameSize()); + } else if (point == null || !imageRect.contains(point.x, point.y)) { + throw new IllegalArgumentException("point is outside the image frame: "+imageRect.toString()); + } else if (!mIsInitialzed) { + throw new IllegalArgumentException("already released"); + } + int id = nRegisterObjectByPoint(imageDataNV21, point.x, point.y, firstTime); + if(id == 0) { + id = PENDING_REGISTRATION; + } + mTrackedId = id; + return mTrackedId; + } + + public void unregisterObject(int id) + { + if (id == PENDING_REGISTRATION) { + Log.e(TAG, "There's a pending object"); + } else if (!mIsInitialzed) { + Log.e(TAG, "already released"); + } + nUnregisterObject(id); + } + + public Result trackObjects(byte[] imageDataNV21) + { + if (imageDataNV21 == null || imageDataNV21.length < getMinFrameSize()) { + Log.e(TAG, "imageDataNV21 null or too small to encode frame " + + imageDataNV21.length+ " "+getMinFrameSize()); + } else if (!mIsInitialzed) { + Log.e(TAG, "It's released"); + } + + int[] nResults = nTrackObjects(imageDataNV21); + return new Result(nResults[0], nResults[1], nResults[2], nResults[3], nResults[4], nResults[5]); + } + + private int getMinFrameSize() { + return ((mStrideY * imageRect.bottom * 3) / 2); + } + + @Override + public ResultImage processImage() { + return null; + } + + @Override + public boolean isSupported() { + return mIsSupported; + } + + @Override + public boolean isFrameListener() { + return true; + } + + private native int nInit(int operationMode, int precision, int width, int height, int stride); + private native void nRelease(); + private native int nGetMinRoiDimension(); + private native int nGetMaxRoiDimension(); + private native int nRegisterObjectByRect(byte[] imageDataNV21, int left, int top, int right, int bottom); + private native int nRegisterObjectByPoint(byte[] imageDataNV21, int x, int y, boolean firstTime); + private native void nUnregisterObject(int id); + private native int[] nTrackObjects(byte[] imageDataNV21); + + static { + mIsSupported = false; + } +} diff --git 
a/src/com/android/camera/ui/RenderOverlay.java b/src/com/android/camera/ui/RenderOverlay.java index 318a754dd..8e2f4457f 100644 --- a/src/com/android/camera/ui/RenderOverlay.java +++ b/src/com/android/camera/ui/RenderOverlay.java @@ -92,6 +92,14 @@ public class RenderOverlay extends FrameLayout { if (mGestures != null) { if (!mGestures.isEnabled()) return false; mGestures.dispatchTouch(m); + return true; + } + if (mTouchClients != null) { + boolean res = false; + for (Renderer client : mTouchClients) { + res |= client.onTouchEvent(m); + } + return res; } return true; } diff --git a/src/com/android/camera/ui/TrackingFocusRenderer.java b/src/com/android/camera/ui/TrackingFocusRenderer.java new file mode 100644 index 000000000..3738137c2 --- /dev/null +++ b/src/com/android/camera/ui/TrackingFocusRenderer.java @@ -0,0 +1,285 @@ +/* +Copyright (c) 2016, The Linux Foundation. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following + disclaimer in the documentation and/or other materials provided + with the distribution. + * Neither the name of The Linux Foundation nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT +ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS +BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR +CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF +SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR +BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, +WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE +OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN +IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +package com.android.camera.ui; + +import android.graphics.Canvas; +import android.graphics.Color; +import android.graphics.Paint; +import android.graphics.Rect; +import android.view.MotionEvent; + +import com.android.camera.CameraActivity; +import com.android.camera.CaptureModule; +import com.android.camera.CaptureUI; +import com.android.camera.imageprocessor.filter.TrackingFocusFrameListener; + +public class TrackingFocusRenderer extends OverlayRenderer implements FocusIndicator { + private FocusRequestThread mFocusRequestThread = null; + private TrackingFocusFrameListener.Result mResult; + private CameraActivity mActivity; + private CaptureModule mModule; + private Paint mTargetPaint; + private int mInX = -1; + private int mInY = -1; + private final static int CIRCLE_THUMB_SIZE = 100; + private Object mLock = new Object(); + private Rect mSurfaceDim; + private CaptureUI mUI; + + public final static int STATUS_INIT = 0; + public final static int STATUS_INPUT = 1; + public final static int STATUS_TRACKING = 2; + public final static int STATUS_TRACKED = 3; + private int mStatus = STATUS_INIT; + + private final static String TAG = "TrackingFocusRenderer"; + private final static boolean DEBUG = false; //Enabling DEBUG LOG reduces the performance drastically. + + private void printErrorLog(String msg) { + if(DEBUG) { + android.util.Log.e(TAG, msg); + } + } + + @Override + public void setVisible(boolean visible) { + super.setVisible(visible); + if(!visible) { + synchronized (mLock) { + mStatus = STATUS_INIT; + mResult = null; + mInX = 0; + mInY = 0; + } + if(mFocusRequestThread != null) { + mFocusRequestThread.kill(); + mFocusRequestThread = null; + } + } else { + mFocusRequestThread = new FocusRequestThread(); + mFocusRequestThread.start(); + } + } + + public void setSurfaceDim(int left, int top, int right, int bottom) { + mSurfaceDim = new Rect(left, top, right, bottom); + } + + public TrackingFocusRenderer(CameraActivity activity, CaptureModule module, CaptureUI ui) { + mActivity = activity; + mModule = module; + mUI = ui; + mTargetPaint = new Paint(); + mTargetPaint.setStrokeWidth(4f); + mTargetPaint.setStyle(Paint.Style.STROKE); + } + + @Override + public boolean handlesTouch() { + return true; + } + + @Override + public boolean onTouchEvent(MotionEvent event) { + switch(event.getActionMasked()) { + case MotionEvent.ACTION_DOWN: + break; + case MotionEvent.ACTION_UP: + synchronized (mLock) { + mInX = (int) event.getX(); + mInY = (int) event.getY(); + if(!mSurfaceDim.contains(mInX, mInY)) { + break; + } + mStatus = STATUS_INPUT; + } + update(); + break; + } + return true; + } + + public int[] getInputCords(int width, int height) { + synchronized (mLock) { + if (mStatus != STATUS_INPUT) { + return null; + } + mStatus = STATUS_TRACKING; + int x = (mUI.getDisplaySize().y-1-mInY); + int y = mInX; + int bottomMargin = mUI.getDisplaySize().y - mSurfaceDim.bottom; + x = (int)((x - bottomMargin)*((float)width/mSurfaceDim.height())); + y = (int)((y - mSurfaceDim.left)*((float)height/mSurfaceDim.width())); + + /* It's supposed to give x,y like above but library x,y is reversed*/ + if(mModule.isBackCamera()) { + x = width - 1 - x; + } + y = height-1-y; + + return new int[]{x, y}; + } + } + + public void putRegisteredCords(TrackingFocusFrameListener.Result result, int width, int height) { + synchronized (mLock) { + if(result != null && result.pos != null && + !(result.pos.width() == 0 && result.pos.height() == 0)) { + result.pos = translateToSurface(result.pos, width, height); + mResult = 
result; + mStatus = STATUS_TRACKED; + } else { + mStatus = STATUS_TRACKING; + } + } + mActivity.runOnUiThread(new Runnable() { + public void run() { + update(); + } + }); + } + + private Rect translateToSurface(Rect src, int width, int height) { + /* It's supposed to be this but direction is reversed in library. + int x = src.centerY(); + int y = width-1-src.centerX(); + */ + int x = height-1-src.centerY(); + int y = src.centerX(); + if(!mModule.isBackCamera()) { + y = width-1-src.centerX(); + } + int w = (int)(src.height()*((float)mSurfaceDim.width()/height)); + int h = (int)(src.width()*((float)mSurfaceDim.height()/width)); + x = mSurfaceDim.left + (int)(x*((float)mSurfaceDim.width()/height)); + y = mSurfaceDim.top + (int)(y*((float)mSurfaceDim.height()/width)); + Rect rect = new Rect(); + rect.left = x - w/2; + rect.top = y - h/2; + rect.right = rect.left + w; + rect.bottom = rect.top + h; + return rect; + } + + private Rect mRect; + + @Override + public void onDraw(Canvas canvas) { + synchronized (mLock) { + if(mResult == null) { + return; + } + if(mStatus == STATUS_TRACKED) { + mRect = mResult.pos; + } + } + + if(mStatus == STATUS_TRACKED) { + if(mRect != null) { + mTargetPaint.setColor(Color.GREEN); + canvas.drawRect(mRect, mTargetPaint); + } + } else if(mStatus == STATUS_TRACKING){ + if(mRect != null) { + mTargetPaint.setColor(Color.RED); + canvas.drawRect(mRect, mTargetPaint); + } + } else if(mStatus == STATUS_INPUT){ + mTargetPaint.setColor(Color.RED); + canvas.drawCircle(mInX, mInY, CIRCLE_THUMB_SIZE, mTargetPaint); + } + } + + @Override + public void showStart() { + } + + @Override + public void showSuccess(boolean timeout) { + } + + @Override + public void showFail(boolean timeout) { + + } + + @Override + public void clear() { + + } + + private class FocusRequestThread extends Thread { + private boolean isRunning = true; + private final static int FOCUS_DELAY = 1000; + private final static int MIN_DIFF_CORDS = 100; + private final static int MIN_DIFF_SIZE = 100; + private int mOldX = -MIN_DIFF_CORDS; + private int mOldY = -MIN_DIFF_CORDS; + private int mOldWidth = -MIN_DIFF_SIZE; + private int mOldHeight = -MIN_DIFF_SIZE; + private int mNewX; + private int mNewY; + private int mNewWidth; + private int mNewHeight; + + public void kill() { + isRunning = false; + } + + public void run() { + while(isRunning) { + try { + Thread.sleep(FOCUS_DELAY); + }catch(InterruptedException e) { + //Ignore + } + + synchronized (mLock) { + if (mResult == null || mResult.pos == null + || (mResult.pos.centerX() == 0 && mResult.pos.centerY() == 0)) { + continue; + } + mNewX = mResult.pos.centerX(); + mNewY = mResult.pos.centerY(); + mNewWidth = mResult.pos.width(); + mNewHeight = mResult.pos.height(); + } + if(Math.abs(mOldX - mNewX) >= MIN_DIFF_CORDS || Math.abs(mOldY - mNewY) >= MIN_DIFF_CORDS || + Math.abs(mOldWidth - mNewWidth) >= MIN_DIFF_SIZE || Math.abs(mOldHeight - mNewHeight) >= MIN_DIFF_SIZE) { + mModule.onSingleTapUp(null, mNewX, mNewY); + mOldX = mNewX; + mOldY = mNewY; + mOldWidth = mNewWidth; + mOldHeight = mNewHeight; + } + } + } + } +}
\ No newline at end of file