diff options
author | Sascha Haeberling <haeberling@google.com> | 2013-09-18 14:28:51 -0700 |
---|---|---|
committer | Sascha Haeberling <haeberling@google.com> | 2013-09-18 14:32:55 -0700 |
commit | 638e6f06c877d90b907f66ea9c22b3c6b73c7384 (patch) | |
tree | 6d2123a6d02228f867ccc6f7e51e2a658b2092d5 /src/com/android/camera | |
parent | 4ed20592482d2ab2f3f48ee72d5b1c06bf009034 (diff) | |
download | android_packages_apps_Snap-638e6f06c877d90b907f66ea9c22b3c6b73c7384.tar.gz android_packages_apps_Snap-638e6f06c877d90b907f66ea9c22b3c6b73c7384.tar.bz2 android_packages_apps_Snap-638e6f06c877d90b907f66ea9c22b3c6b73c7384.zip |
Clean up ApiHelper and remove unused code paths.
Bug: 10821545
As we're targeting ICS, there are a lot of checks and code paths
that are no longer in use. This CL cleans them up.
Change-Id: Ic3dd26628a94e134e25e2c496ccec1f1f957216d
Diffstat (limited to 'src/com/android/camera')
21 files changed, 131 insertions, 2128 deletions
diff --git a/src/com/android/camera/AndroidCameraManagerImpl.java b/src/com/android/camera/AndroidCameraManagerImpl.java index 0c07cbdb7..f3e700ec0 100644 --- a/src/com/android/camera/AndroidCameraManagerImpl.java +++ b/src/com/android/camera/AndroidCameraManagerImpl.java @@ -32,6 +32,7 @@ import android.hardware.Camera.Parameters; import android.hardware.Camera.PictureCallback; import android.hardware.Camera.PreviewCallback; import android.hardware.Camera.ShutterCallback; +import android.os.Build; import android.os.Handler; import android.os.HandlerThread; import android.os.Looper; @@ -39,8 +40,6 @@ import android.os.Message; import android.util.Log; import android.view.SurfaceHolder; -import com.android.camera.util.ApiHelper; - /** * A class to implement {@link CameraManager} of the Android camera framework. */ @@ -102,22 +101,18 @@ class AndroidCameraManagerImpl implements CameraManager { super(looper); } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) private void startFaceDetection() { mCamera.startFaceDetection(); } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) private void stopFaceDetection() { mCamera.stopFaceDetection(); } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) private void setFaceDetectionListener(FaceDetectionListener listener) { mCamera.setFaceDetectionListener(listener); } - @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) private void setPreviewTexture(Object surfaceTexture) { try { mCamera.setPreviewTexture((SurfaceTexture) surfaceTexture); @@ -126,12 +121,12 @@ class AndroidCameraManagerImpl implements CameraManager { } } - @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN_MR1) + @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1) private void enableShutterSound(boolean enable) { mCamera.enableShutterSound(enable); } - @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN) + @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private void setAutoFocusMoveCallback( android.hardware.Camera camera, Object cb) { 
camera.setAutoFocusMoveCallback((AutoFocusMoveCallback) cb); @@ -407,7 +402,6 @@ class AndroidCameraManagerImpl implements CameraManager { mCameraHandler.sendEmptyMessage(LOCK); } - @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) @Override public void setPreviewTexture(SurfaceTexture surfaceTexture) { mCameraHandler.obtainMessage(SET_PREVIEW_TEXTURE_ASYNC, surfaceTexture).sendToTarget(); @@ -463,7 +457,7 @@ class AndroidCameraManagerImpl implements CameraManager { mCameraHandler.sendEmptyMessage(CANCEL_AUTO_FOCUS); } - @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN) + @TargetApi(Build.VERSION_CODES.JELLY_BEAN) @Override public void setAutoFocusMoveCallback( Handler handler, CameraAFMoveCallback cb) { @@ -497,7 +491,6 @@ class AndroidCameraManagerImpl implements CameraManager { mCameraHandler.obtainMessage(SET_ZOOM_CHANGE_LISTENER, listener).sendToTarget(); } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) public void setFaceDetectionCallback( Handler handler, CameraFaceDetectionCallback cb) { mCameraHandler.obtainMessage( @@ -591,7 +584,7 @@ class AndroidCameraManagerImpl implements CameraManager { } /** A helper class to forward AutoFocusMoveCallback to another thread. 
*/ - @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN) + @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private static class AFMoveCallbackForward implements AutoFocusMoveCallback { private final Handler mHandler; private final CameraAFMoveCallback mCallback; diff --git a/src/com/android/camera/CameraActivity.java b/src/com/android/camera/CameraActivity.java index f545bbdc7..46dc2ba1e 100644 --- a/src/com/android/camera/CameraActivity.java +++ b/src/com/android/camera/CameraActivity.java @@ -32,7 +32,6 @@ import android.content.res.Configuration; import android.graphics.drawable.ColorDrawable; import android.net.Uri; import android.os.AsyncTask; -import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.IBinder; @@ -250,7 +249,7 @@ public class CameraActivity extends Activity } private static int getImmersiveFlags() { - if (isKitKatOrHigher()) { + if (ApiHelper.HAS_HIDEYBARS) { return View.SYSTEM_UI_FLAG_IMMERSIVE | View.SYSTEM_UI_FLAG_TRANSPARENT_STATUS | View.SYSTEM_UI_FLAG_TRANSPARENT_NAVIGATION @@ -262,12 +261,6 @@ public class CameraActivity extends Activity } } - public static boolean isKitKatOrHigher() { - // TODO: Remove CODENAME check as soon as VERSION_CODES.KITKAT is final. 
- return Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT - || "KeyLimePie".equals(Build.VERSION.CODENAME); - } - public static boolean isFirstStartAfterScreenOn() { return sFirstStartAfterScreenOn; } diff --git a/src/com/android/camera/CameraManager.java b/src/com/android/camera/CameraManager.java index 07b8150ca..909e1ca75 100644 --- a/src/com/android/camera/CameraManager.java +++ b/src/com/android/camera/CameraManager.java @@ -16,19 +16,16 @@ package com.android.camera; -import java.io.IOException; - import android.annotation.TargetApi; import android.graphics.SurfaceTexture; import android.hardware.Camera; import android.hardware.Camera.ErrorCallback; import android.hardware.Camera.OnZoomChangeListener; import android.hardware.Camera.Parameters; +import android.os.Build; import android.os.Handler; import android.view.SurfaceHolder; -import com.android.camera.util.ApiHelper; - /** * An interface which provides possible camera device operations. * @@ -196,7 +193,6 @@ public interface CameraManager { * * @param surfaceTexture The {@link SurfaceTexture} for preview. */ - @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) public void setPreviewTexture(final SurfaceTexture surfaceTexture); /** @@ -262,7 +258,7 @@ public interface CameraManager { * @param handler The handler in which the callback will be invoked. * @param cb The callback to be invoked when the preview data is available. */ - @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN) + @TargetApi(Build.VERSION_CODES.JELLY_BEAN) public void setAutoFocusMoveCallback(Handler handler, CameraAFMoveCallback cb); /** @@ -305,7 +301,6 @@ public interface CameraManager { * @param handler The handler in which the callback will be invoked. * @param callback The callback for face detection results. 
*/ - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) public void setFaceDetectionCallback(Handler handler, CameraFaceDetectionCallback callback); /** diff --git a/src/com/android/camera/CameraSettings.java b/src/com/android/camera/CameraSettings.java index 791a4f719..2ae2353af 100644 --- a/src/com/android/camera/CameraSettings.java +++ b/src/com/android/camera/CameraSettings.java @@ -26,12 +26,11 @@ import android.hardware.Camera.CameraInfo; import android.hardware.Camera.Parameters; import android.hardware.Camera.Size; import android.media.CamcorderProfile; -import android.util.FloatMath; import android.util.Log; +import com.android.camera.util.ApiHelper; import com.android.camera.util.CameraUtil; import com.android.camera2.R; -import com.android.camera.util.ApiHelper; import java.util.ArrayList; import java.util.List; @@ -164,9 +163,6 @@ public class CameraSettings { ListPreference focusMode = group.findPreference(KEY_FOCUS_MODE); IconListPreference exposure = (IconListPreference) group.findPreference(KEY_EXPOSURE); - CountDownTimerPreference timer = - (CountDownTimerPreference) group.findPreference(KEY_TIMER); - ListPreference countDownSoundEffects = group.findPreference(KEY_TIMER_SOUND_EFFECTS); IconListPreference cameraIdPref = (IconListPreference) group.findPreference(KEY_CAMERA_ID); ListPreference videoFlashMode = @@ -214,19 +210,10 @@ public class CameraSettings { if (cameraIdPref != null) buildCameraId(group, cameraIdPref); if (timeLapseInterval != null) { - if (ApiHelper.HAS_TIME_LAPSE_RECORDING) { - resetIfInvalid(timeLapseInterval); - } else { - removePreference(group, timeLapseInterval.getKey()); - } + resetIfInvalid(timeLapseInterval); } if (videoEffect != null) { - if (ApiHelper.HAS_EFFECTS_RECORDING) { - initVideoEffect(group, videoEffect); - resetIfInvalid(videoEffect); - } else { - filterUnsupportedOptions(group, videoEffect, null); - } + filterUnsupportedOptions(group, videoEffect, null); } if (cameraHdr != null && 
(!ApiHelper.HAS_CAMERA_HDR || !CameraUtil.isCameraHdrSupported(mParameters))) { @@ -245,8 +232,8 @@ public class CameraSettings { float step = mParameters.getExposureCompensationStep(); // show only integer values for exposure compensation - int maxValue = Math.min(3, (int) FloatMath.floor(max * step)); - int minValue = Math.max(-3, (int) FloatMath.ceil(min * step)); + int maxValue = Math.min(3, (int) Math.floor(max * step)); + int minValue = Math.max(-3, (int) Math.ceil(min * step)); String explabel = mContext.getResources().getString(R.string.pref_exposure_label); CharSequence entries[] = new CharSequence[maxValue - minValue + 1]; CharSequence entryValues[] = new CharSequence[maxValue - minValue + 1]; @@ -453,51 +440,6 @@ public class CameraSettings { return 0; } - public static int readEffectType(SharedPreferences pref) { - String effectSelection = pref.getString(KEY_VIDEO_EFFECT, "none"); - if (effectSelection.equals("none")) { - return EffectsRecorder.EFFECT_NONE; - } else if (effectSelection.startsWith("goofy_face")) { - return EffectsRecorder.EFFECT_GOOFY_FACE; - } else if (effectSelection.startsWith("backdropper")) { - return EffectsRecorder.EFFECT_BACKDROPPER; - } - Log.e(TAG, "Invalid effect selection: " + effectSelection); - return EffectsRecorder.EFFECT_NONE; - } - - public static Object readEffectParameter(SharedPreferences pref) { - String effectSelection = pref.getString(KEY_VIDEO_EFFECT, "none"); - if (effectSelection.equals("none")) { - return null; - } - int separatorIndex = effectSelection.indexOf('/'); - String effectParameter = - effectSelection.substring(separatorIndex + 1); - if (effectSelection.startsWith("goofy_face")) { - if (effectParameter.equals("squeeze")) { - return EffectsRecorder.EFFECT_GF_SQUEEZE; - } else if (effectParameter.equals("big_eyes")) { - return EffectsRecorder.EFFECT_GF_BIG_EYES; - } else if (effectParameter.equals("big_mouth")) { - return EffectsRecorder.EFFECT_GF_BIG_MOUTH; - } else if 
(effectParameter.equals("small_mouth")) { - return EffectsRecorder.EFFECT_GF_SMALL_MOUTH; - } else if (effectParameter.equals("big_nose")) { - return EffectsRecorder.EFFECT_GF_BIG_NOSE; - } else if (effectParameter.equals("small_eyes")) { - return EffectsRecorder.EFFECT_GF_SMALL_EYES; - } - } else if (effectSelection.startsWith("backdropper")) { - // Parameter is a string that either encodes the URI to use, - // or specifies 'gallery'. - return effectParameter; - } - - Log.e(TAG, "Invalid effect selection: " + effectSelection); - return null; - } - public static void restorePreferences(Context context, ComboPreferences preferences, Parameters parameters) { int currentCameraId = readPreferredCameraId(preferences); @@ -546,25 +488,4 @@ public class CameraSettings { } return supported; } - - private void initVideoEffect(PreferenceGroup group, ListPreference videoEffect) { - CharSequence[] values = videoEffect.getEntryValues(); - - boolean goofyFaceSupported = - EffectsRecorder.isEffectSupported(EffectsRecorder.EFFECT_GOOFY_FACE); - boolean backdropperSupported = - EffectsRecorder.isEffectSupported(EffectsRecorder.EFFECT_BACKDROPPER) && - CameraUtil.isAutoExposureLockSupported(mParameters) && - CameraUtil.isAutoWhiteBalanceLockSupported(mParameters); - - ArrayList<String> supported = new ArrayList<String>(); - for (CharSequence value : values) { - String effectSelection = value.toString(); - if (!goofyFaceSupported && effectSelection.startsWith("goofy_face")) continue; - if (!backdropperSupported && effectSelection.startsWith("backdropper")) continue; - supported.add(effectSelection); - } - - filterUnsupportedOptions(group, videoEffect, supported); - } } diff --git a/src/com/android/camera/EffectsRecorder.java b/src/com/android/camera/EffectsRecorder.java deleted file mode 100644 index 151441ef9..000000000 --- a/src/com/android/camera/EffectsRecorder.java +++ /dev/null @@ -1,1240 +0,0 @@ -/* - * Copyright (C) 2011 The Android Open Source Project - * - * Licensed under 
the Apache License, Version 2.0 (the "License"); you may not - * use this file except in compliance with the License. You may obtain a copy of - * the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations under - * the License. - */ - -package com.android.camera; - -import java.io.FileDescriptor; -import java.io.Serializable; -import java.lang.reflect.Constructor; -import java.lang.reflect.InvocationHandler; -import java.lang.reflect.Method; -import java.lang.reflect.Proxy; - -import android.annotation.TargetApi; -import android.content.Context; -import android.graphics.SurfaceTexture; -import android.hardware.Camera; -import android.media.CamcorderProfile; -import android.media.MediaRecorder; -import android.os.Handler; -import android.os.Looper; -import android.util.Log; - -import com.android.camera.util.ApiHelper; -import com.android.camera.util.CameraUtil; -import com.android.camera2.R; - - -/** - * Encapsulates the mobile filter framework components needed to record video - * with effects applied. Modeled after MediaRecorder. - */ -@TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) // uses SurfaceTexture -public class EffectsRecorder { - private static final String TAG = "EffectsRecorder"; - - private static Class<?> sClassFilter; - private static Method sFilterIsAvailable; - private static EffectsRecorder sEffectsRecorder; - // The index of the current effects recorder. 
- private static int sEffectsRecorderIndex; - - private static boolean sReflectionInited = false; - - private static Class<?> sClsLearningDoneListener; - private static Class<?> sClsOnRunnerDoneListener; - private static Class<?> sClsOnRecordingDoneListener; - private static Class<?> sClsSurfaceTextureSourceListener; - - private static Method sFilterSetInputValue; - - private static Constructor<?> sCtPoint; - private static Constructor<?> sCtQuad; - - private static Method sLearningDoneListenerOnLearningDone; - - private static Method sObjectEquals; - private static Method sObjectToString; - - private static Class<?> sClsGraphRunner; - private static Method sGraphRunnerGetGraph; - private static Method sGraphRunnerSetDoneCallback; - private static Method sGraphRunnerRun; - private static Method sGraphRunnerGetError; - private static Method sGraphRunnerStop; - - private static Method sFilterGraphGetFilter; - private static Method sFilterGraphTearDown; - - private static Method sOnRunnerDoneListenerOnRunnerDone; - - private static Class<?> sClsGraphEnvironment; - private static Constructor<?> sCtGraphEnvironment; - private static Method sGraphEnvironmentCreateGLEnvironment; - private static Method sGraphEnvironmentGetRunner; - private static Method sGraphEnvironmentAddReferences; - private static Method sGraphEnvironmentLoadGraph; - private static Method sGraphEnvironmentGetContext; - - private static Method sFilterContextGetGLEnvironment; - private static Method sGLEnvironmentIsActive; - private static Method sGLEnvironmentActivate; - private static Method sGLEnvironmentDeactivate; - private static Method sSurfaceTextureTargetDisconnect; - private static Method sOnRecordingDoneListenerOnRecordingDone; - private static Method sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady; - - private Object mLearningDoneListener; - private Object mRunnerDoneCallback; - private Object mSourceReadyCallback; - // A callback to finalize the media after the recording is done. 
- private Object mRecordingDoneListener; - - static { - try { - sClassFilter = Class.forName("android.filterfw.core.Filter"); - sFilterIsAvailable = sClassFilter.getMethod("isAvailable", - String.class); - } catch (ClassNotFoundException ex) { - Log.v(TAG, "Can't find the class android.filterfw.core.Filter"); - } catch (NoSuchMethodException e) { - Log.v(TAG, "Can't find the method Filter.isAvailable"); - } - } - - public static final int EFFECT_NONE = 0; - public static final int EFFECT_GOOFY_FACE = 1; - public static final int EFFECT_BACKDROPPER = 2; - - public static final int EFFECT_GF_SQUEEZE = 0; - public static final int EFFECT_GF_BIG_EYES = 1; - public static final int EFFECT_GF_BIG_MOUTH = 2; - public static final int EFFECT_GF_SMALL_MOUTH = 3; - public static final int EFFECT_GF_BIG_NOSE = 4; - public static final int EFFECT_GF_SMALL_EYES = 5; - public static final int NUM_OF_GF_EFFECTS = EFFECT_GF_SMALL_EYES + 1; - - public static final int EFFECT_MSG_STARTED_LEARNING = 0; - public static final int EFFECT_MSG_DONE_LEARNING = 1; - public static final int EFFECT_MSG_SWITCHING_EFFECT = 2; - public static final int EFFECT_MSG_EFFECTS_STOPPED = 3; - public static final int EFFECT_MSG_RECORDING_DONE = 4; - public static final int EFFECT_MSG_PREVIEW_RUNNING = 5; - - private Context mContext; - private Handler mHandler; - - private CameraManager.CameraProxy mCameraDevice; - private CamcorderProfile mProfile; - private double mCaptureRate = 0; - private SurfaceTexture mPreviewSurfaceTexture; - private int mPreviewWidth; - private int mPreviewHeight; - private MediaRecorder.OnInfoListener mInfoListener; - private MediaRecorder.OnErrorListener mErrorListener; - - private String mOutputFile; - private FileDescriptor mFd; - private int mOrientationHint = 0; - private long mMaxFileSize = 0; - private int mMaxDurationMs = 0; - private int mCameraFacing = Camera.CameraInfo.CAMERA_FACING_BACK; - private int mCameraDisplayOrientation; - - private int mEffect = 
EFFECT_NONE; - private int mCurrentEffect = EFFECT_NONE; - private EffectsListener mEffectsListener; - - private Object mEffectParameter; - - private Object mGraphEnv; - private int mGraphId; - private Object mRunner = null; - private Object mOldRunner = null; - - private SurfaceTexture mTextureSource; - - private static final int STATE_CONFIGURE = 0; - private static final int STATE_WAITING_FOR_SURFACE = 1; - private static final int STATE_STARTING_PREVIEW = 2; - private static final int STATE_PREVIEW = 3; - private static final int STATE_RECORD = 4; - private static final int STATE_RELEASED = 5; - private int mState = STATE_CONFIGURE; - - private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE); - private SoundClips.Player mSoundPlayer; - - /** Determine if a given effect is supported at runtime - * Some effects require libraries not available on all devices - */ - public static boolean isEffectSupported(int effectId) { - if (sFilterIsAvailable == null) return false; - - try { - switch (effectId) { - case EFFECT_GOOFY_FACE: - return (Boolean) sFilterIsAvailable.invoke(null, - "com.google.android.filterpacks.facedetect.GoofyRenderFilter"); - case EFFECT_BACKDROPPER: - return (Boolean) sFilterIsAvailable.invoke(null, - "android.filterpacks.videoproc.BackDropperFilter"); - default: - return false; - } - } catch (Exception ex) { - Log.e(TAG, "Fail to check filter", ex); - } - return false; - } - - public EffectsRecorder(Context context) { - if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")"); - - if (!sReflectionInited) { - try { - sFilterSetInputValue = sClassFilter.getMethod("setInputValue", - new Class[] {String.class, Object.class}); - - Class<?> clsPoint = Class.forName("android.filterfw.geometry.Point"); - sCtPoint = clsPoint.getConstructor(new Class[] {float.class, - float.class}); - - Class<?> clsQuad = Class.forName("android.filterfw.geometry.Quad"); - sCtQuad = clsQuad.getConstructor(new Class[] {clsPoint, clsPoint, - clsPoint, 
clsPoint}); - - Class<?> clsBackDropperFilter = Class.forName( - "android.filterpacks.videoproc.BackDropperFilter"); - sClsLearningDoneListener = Class.forName( - "android.filterpacks.videoproc.BackDropperFilter$LearningDoneListener"); - sLearningDoneListenerOnLearningDone = sClsLearningDoneListener - .getMethod("onLearningDone", new Class[] {clsBackDropperFilter}); - - sObjectEquals = Object.class.getMethod("equals", new Class[] {Object.class}); - sObjectToString = Object.class.getMethod("toString"); - - sClsOnRunnerDoneListener = Class.forName( - "android.filterfw.core.GraphRunner$OnRunnerDoneListener"); - sOnRunnerDoneListenerOnRunnerDone = sClsOnRunnerDoneListener.getMethod( - "onRunnerDone", new Class[] {int.class}); - - sClsGraphRunner = Class.forName("android.filterfw.core.GraphRunner"); - sGraphRunnerGetGraph = sClsGraphRunner.getMethod("getGraph"); - sGraphRunnerSetDoneCallback = sClsGraphRunner.getMethod( - "setDoneCallback", new Class[] {sClsOnRunnerDoneListener}); - sGraphRunnerRun = sClsGraphRunner.getMethod("run"); - sGraphRunnerGetError = sClsGraphRunner.getMethod("getError"); - sGraphRunnerStop = sClsGraphRunner.getMethod("stop"); - - Class<?> clsFilterContext = Class.forName("android.filterfw.core.FilterContext"); - sFilterContextGetGLEnvironment = clsFilterContext.getMethod( - "getGLEnvironment"); - - Class<?> clsFilterGraph = Class.forName("android.filterfw.core.FilterGraph"); - sFilterGraphGetFilter = clsFilterGraph.getMethod("getFilter", - new Class[] {String.class}); - sFilterGraphTearDown = clsFilterGraph.getMethod("tearDown", - new Class[] {clsFilterContext}); - - sClsGraphEnvironment = Class.forName("android.filterfw.GraphEnvironment"); - sCtGraphEnvironment = sClsGraphEnvironment.getConstructor(); - sGraphEnvironmentCreateGLEnvironment = sClsGraphEnvironment.getMethod( - "createGLEnvironment"); - sGraphEnvironmentGetRunner = sClsGraphEnvironment.getMethod( - "getRunner", new Class[] {int.class, int.class}); - sGraphEnvironmentAddReferences 
= sClsGraphEnvironment.getMethod( - "addReferences", new Class[] {Object[].class}); - sGraphEnvironmentLoadGraph = sClsGraphEnvironment.getMethod( - "loadGraph", new Class[] {Context.class, int.class}); - sGraphEnvironmentGetContext = sClsGraphEnvironment.getMethod( - "getContext"); - - Class<?> clsGLEnvironment = Class.forName("android.filterfw.core.GLEnvironment"); - sGLEnvironmentIsActive = clsGLEnvironment.getMethod("isActive"); - sGLEnvironmentActivate = clsGLEnvironment.getMethod("activate"); - sGLEnvironmentDeactivate = clsGLEnvironment.getMethod("deactivate"); - - Class<?> clsSurfaceTextureTarget = Class.forName( - "android.filterpacks.videosrc.SurfaceTextureTarget"); - sSurfaceTextureTargetDisconnect = clsSurfaceTextureTarget.getMethod( - "disconnect", new Class[] {clsFilterContext}); - - sClsOnRecordingDoneListener = Class.forName( - "android.filterpacks.videosink.MediaEncoderFilter$OnRecordingDoneListener"); - sOnRecordingDoneListenerOnRecordingDone = - sClsOnRecordingDoneListener.getMethod("onRecordingDone"); - - sClsSurfaceTextureSourceListener = Class.forName( - "android.filterpacks.videosrc.SurfaceTextureSource$SurfaceTextureSourceListener"); - sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady = - sClsSurfaceTextureSourceListener.getMethod( - "onSurfaceTextureSourceReady", - new Class[] {SurfaceTexture.class}); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - - sReflectionInited = true; - } - - sEffectsRecorderIndex++; - Log.v(TAG, "Current effects recorder index is " + sEffectsRecorderIndex); - sEffectsRecorder = this; - SerializableInvocationHandler sih = new SerializableInvocationHandler( - sEffectsRecorderIndex); - mLearningDoneListener = Proxy.newProxyInstance( - sClsLearningDoneListener.getClassLoader(), - new Class[] {sClsLearningDoneListener}, sih); - mRunnerDoneCallback = Proxy.newProxyInstance( - sClsOnRunnerDoneListener.getClassLoader(), - new Class[] {sClsOnRunnerDoneListener}, sih); - mSourceReadyCallback = 
Proxy.newProxyInstance( - sClsSurfaceTextureSourceListener.getClassLoader(), - new Class[] {sClsSurfaceTextureSourceListener}, sih); - mRecordingDoneListener = Proxy.newProxyInstance( - sClsOnRecordingDoneListener.getClassLoader(), - new Class[] {sClsOnRecordingDoneListener}, sih); - - mContext = context; - mHandler = new Handler(Looper.getMainLooper()); - mSoundPlayer = SoundClips.getPlayer(context); - } - - public synchronized void setCamera(CameraManager.CameraProxy cameraDevice) { - switch (mState) { - case STATE_PREVIEW: - throw new RuntimeException("setCamera cannot be called while previewing!"); - case STATE_RECORD: - throw new RuntimeException("setCamera cannot be called while recording!"); - case STATE_RELEASED: - throw new RuntimeException("setCamera called on an already released recorder!"); - default: - break; - } - - mCameraDevice = cameraDevice; - } - - public void setProfile(CamcorderProfile profile) { - switch (mState) { - case STATE_RECORD: - throw new RuntimeException("setProfile cannot be called while recording!"); - case STATE_RELEASED: - throw new RuntimeException("setProfile called on an already released recorder!"); - default: - break; - } - mProfile = profile; - } - - public void setOutputFile(String outputFile) { - switch (mState) { - case STATE_RECORD: - throw new RuntimeException("setOutputFile cannot be called while recording!"); - case STATE_RELEASED: - throw new RuntimeException("setOutputFile called on an already released recorder!"); - default: - break; - } - - mOutputFile = outputFile; - mFd = null; - } - - public void setOutputFile(FileDescriptor fd) { - switch (mState) { - case STATE_RECORD: - throw new RuntimeException("setOutputFile cannot be called while recording!"); - case STATE_RELEASED: - throw new RuntimeException("setOutputFile called on an already released recorder!"); - default: - break; - } - - mOutputFile = null; - mFd = fd; - } - - /** - * Sets the maximum filesize (in bytes) of the recording session. 
- * This will be passed on to the MediaEncoderFilter and then to the - * MediaRecorder ultimately. If zero or negative, the MediaRecorder will - * disable the limit - */ - public synchronized void setMaxFileSize(long maxFileSize) { - switch (mState) { - case STATE_RECORD: - throw new RuntimeException("setMaxFileSize cannot be called while recording!"); - case STATE_RELEASED: - throw new RuntimeException( - "setMaxFileSize called on an already released recorder!"); - default: - break; - } - mMaxFileSize = maxFileSize; - } - - /** - * Sets the maximum recording duration (in ms) for the next recording session - * Setting it to zero (the default) disables the limit. - */ - public synchronized void setMaxDuration(int maxDurationMs) { - switch (mState) { - case STATE_RECORD: - throw new RuntimeException("setMaxDuration cannot be called while recording!"); - case STATE_RELEASED: - throw new RuntimeException( - "setMaxDuration called on an already released recorder!"); - default: - break; - } - mMaxDurationMs = maxDurationMs; - } - - - public void setCaptureRate(double fps) { - switch (mState) { - case STATE_RECORD: - throw new RuntimeException("setCaptureRate cannot be called while recording!"); - case STATE_RELEASED: - throw new RuntimeException( - "setCaptureRate called on an already released recorder!"); - default: - break; - } - - if (mLogVerbose) Log.v(TAG, "Setting time lapse capture rate to " + fps + " fps"); - mCaptureRate = fps; - } - - public void setPreviewSurfaceTexture(SurfaceTexture previewSurfaceTexture, - int previewWidth, - int previewHeight) { - if (mLogVerbose) Log.v(TAG, "setPreviewSurfaceTexture(" + this + ")"); - switch (mState) { - case STATE_RECORD: - throw new RuntimeException( - "setPreviewSurfaceTexture cannot be called while recording!"); - case STATE_RELEASED: - throw new RuntimeException( - "setPreviewSurfaceTexture called on an already released recorder!"); - default: - break; - } - - mPreviewSurfaceTexture = previewSurfaceTexture; - 
mPreviewWidth = previewWidth; - mPreviewHeight = previewHeight; - - switch (mState) { - case STATE_WAITING_FOR_SURFACE: - startPreview(); - break; - case STATE_STARTING_PREVIEW: - case STATE_PREVIEW: - initializeEffect(true); - break; - } - } - - public void setEffect(int effect, Object effectParameter) { - if (mLogVerbose) Log.v(TAG, - "setEffect: effect ID " + effect + - ", parameter " + effectParameter.toString()); - switch (mState) { - case STATE_RECORD: - throw new RuntimeException("setEffect cannot be called while recording!"); - case STATE_RELEASED: - throw new RuntimeException("setEffect called on an already released recorder!"); - default: - break; - } - - mEffect = effect; - mEffectParameter = effectParameter; - - if (mState == STATE_PREVIEW || - mState == STATE_STARTING_PREVIEW) { - initializeEffect(false); - } - } - - public interface EffectsListener { - public void onEffectsUpdate(int effectId, int effectMsg); - public void onEffectsError(Exception exception, String filePath); - } - - public void setEffectsListener(EffectsListener listener) { - mEffectsListener = listener; - } - - private void setFaceDetectOrientation() { - if (mCurrentEffect == EFFECT_GOOFY_FACE) { - Object rotateFilter = getGraphFilter(mRunner, "rotate"); - Object metaRotateFilter = getGraphFilter(mRunner, "metarotate"); - setInputValue(rotateFilter, "rotation", mOrientationHint); - int reverseDegrees = (360 - mOrientationHint) % 360; - setInputValue(metaRotateFilter, "rotation", reverseDegrees); - } - } - - private void setRecordingOrientation() { - if (mState != STATE_RECORD && mRunner != null) { - Object bl = newInstance(sCtPoint, new Object[] {0, 0}); - Object br = newInstance(sCtPoint, new Object[] {1, 0}); - Object tl = newInstance(sCtPoint, new Object[] {0, 1}); - Object tr = newInstance(sCtPoint, new Object[] {1, 1}); - Object recordingRegion; - if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_BACK) { - // The back camera is not mirrored, so use a identity transform - 
recordingRegion = newInstance(sCtQuad, new Object[] {bl, br, tl, tr}); - } else { - // Recording region needs to be tweaked for front cameras, since they - // mirror their preview - if (mOrientationHint == 0 || mOrientationHint == 180) { - // Horizontal flip in landscape - recordingRegion = newInstance(sCtQuad, new Object[] {br, bl, tr, tl}); - } else { - // Horizontal flip in portrait - recordingRegion = newInstance(sCtQuad, new Object[] {tl, tr, bl, br}); - } - } - Object recorder = getGraphFilter(mRunner, "recorder"); - setInputValue(recorder, "inputRegion", recordingRegion); - } - } - public void setOrientationHint(int degrees) { - switch (mState) { - case STATE_RELEASED: - throw new RuntimeException( - "setOrientationHint called on an already released recorder!"); - default: - break; - } - if (mLogVerbose) Log.v(TAG, "Setting orientation hint to: " + degrees); - mOrientationHint = degrees; - setFaceDetectOrientation(); - setRecordingOrientation(); - } - - public void setCameraDisplayOrientation(int orientation) { - if (mState != STATE_CONFIGURE) { - throw new RuntimeException( - "setCameraDisplayOrientation called after configuration!"); - } - mCameraDisplayOrientation = orientation; - } - - public void setCameraFacing(int facing) { - switch (mState) { - case STATE_RELEASED: - throw new RuntimeException( - "setCameraFacing called on alrady released recorder!"); - default: - break; - } - mCameraFacing = facing; - setRecordingOrientation(); - } - - public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) { - switch (mState) { - case STATE_RECORD: - throw new RuntimeException("setInfoListener cannot be called while recording!"); - case STATE_RELEASED: - throw new RuntimeException( - "setInfoListener called on an already released recorder!"); - default: - break; - } - mInfoListener = infoListener; - } - - public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) { - switch (mState) { - case STATE_RECORD: - throw new 
RuntimeException("setErrorListener cannot be called while recording!"); - case STATE_RELEASED: - throw new RuntimeException( - "setErrorListener called on an already released recorder!"); - default: - break; - } - mErrorListener = errorListener; - } - - private void initializeFilterFramework() { - mGraphEnv = newInstance(sCtGraphEnvironment); - invoke(mGraphEnv, sGraphEnvironmentCreateGLEnvironment); - - int videoFrameWidth = mProfile.videoFrameWidth; - int videoFrameHeight = mProfile.videoFrameHeight; - if (mCameraDisplayOrientation == 90 || mCameraDisplayOrientation == 270) { - int tmp = videoFrameWidth; - videoFrameWidth = videoFrameHeight; - videoFrameHeight = tmp; - } - - invoke(mGraphEnv, sGraphEnvironmentAddReferences, - new Object[] {new Object[] { - "textureSourceCallback", mSourceReadyCallback, - "recordingWidth", videoFrameWidth, - "recordingHeight", videoFrameHeight, - "recordingProfile", mProfile, - "learningDoneListener", mLearningDoneListener, - "recordingDoneListener", mRecordingDoneListener}}); - mRunner = null; - mGraphId = -1; - mCurrentEffect = EFFECT_NONE; - } - - private synchronized void initializeEffect(boolean forceReset) { - if (forceReset || - mCurrentEffect != mEffect || - mCurrentEffect == EFFECT_BACKDROPPER) { - - invoke(mGraphEnv, sGraphEnvironmentAddReferences, - new Object[] {new Object[] { - "previewSurfaceTexture", mPreviewSurfaceTexture, - "previewWidth", mPreviewWidth, - "previewHeight", mPreviewHeight, - "orientation", mOrientationHint}}); - if (mState == STATE_PREVIEW || - mState == STATE_STARTING_PREVIEW) { - // Switching effects while running. Inform video camera. 
- sendMessage(mCurrentEffect, EFFECT_MSG_SWITCHING_EFFECT); - } - - switch (mEffect) { - case EFFECT_GOOFY_FACE: - mGraphId = (Integer) invoke(mGraphEnv, - sGraphEnvironmentLoadGraph, - new Object[] {mContext, R.raw.goofy_face}); - break; - case EFFECT_BACKDROPPER: - sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING); - mGraphId = (Integer) invoke(mGraphEnv, - sGraphEnvironmentLoadGraph, - new Object[] {mContext, R.raw.backdropper}); - break; - default: - throw new RuntimeException("Unknown effect ID" + mEffect + "!"); - } - mCurrentEffect = mEffect; - - mOldRunner = mRunner; - mRunner = invoke(mGraphEnv, sGraphEnvironmentGetRunner, - new Object[] {mGraphId, - getConstant(sClsGraphEnvironment, "MODE_ASYNCHRONOUS")}); - invoke(mRunner, sGraphRunnerSetDoneCallback, new Object[] {mRunnerDoneCallback}); - if (mLogVerbose) { - Log.v(TAG, "New runner: " + mRunner - + ". Old runner: " + mOldRunner); - } - if (mState == STATE_PREVIEW || - mState == STATE_STARTING_PREVIEW) { - // Switching effects while running. Stop existing runner. - // The stop callback will take care of starting new runner. - mCameraDevice.stopPreview(); - mCameraDevice.setPreviewTexture(null); - invoke(mOldRunner, sGraphRunnerStop); - } - } - - switch (mCurrentEffect) { - case EFFECT_GOOFY_FACE: - tryEnableVideoStabilization(true); - Object goofyFilter = getGraphFilter(mRunner, "goofyrenderer"); - setInputValue(goofyFilter, "currentEffect", - ((Integer) mEffectParameter).intValue()); - break; - case EFFECT_BACKDROPPER: - tryEnableVideoStabilization(false); - Object backgroundSrc = getGraphFilter(mRunner, "background"); - if (ApiHelper.HAS_EFFECTS_RECORDING_CONTEXT_INPUT) { - // Set the context first before setting sourceUrl to - // guarantee the content URI get resolved properly. 
- setInputValue(backgroundSrc, "context", mContext); - } - setInputValue(backgroundSrc, "sourceUrl", mEffectParameter); - // For front camera, the background video needs to be mirrored in the - // backdropper filter - if (mCameraFacing == Camera.CameraInfo.CAMERA_FACING_FRONT) { - Object replacer = getGraphFilter(mRunner, "replacer"); - setInputValue(replacer, "mirrorBg", true); - if (mLogVerbose) Log.v(TAG, "Setting the background to be mirrored"); - } - break; - default: - break; - } - setFaceDetectOrientation(); - setRecordingOrientation(); - } - - public synchronized void startPreview() { - if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")"); - - switch (mState) { - case STATE_STARTING_PREVIEW: - case STATE_PREVIEW: - // Already running preview - Log.w(TAG, "startPreview called when already running preview"); - return; - case STATE_RECORD: - throw new RuntimeException("Cannot start preview when already recording!"); - case STATE_RELEASED: - throw new RuntimeException("setEffect called on an already released recorder!"); - default: - break; - } - - if (mEffect == EFFECT_NONE) { - throw new RuntimeException("No effect selected!"); - } - if (mEffectParameter == null) { - throw new RuntimeException("No effect parameter provided!"); - } - if (mProfile == null) { - throw new RuntimeException("No recording profile provided!"); - } - if (mPreviewSurfaceTexture == null) { - if (mLogVerbose) Log.v(TAG, "Passed a null surface; waiting for valid one"); - mState = STATE_WAITING_FOR_SURFACE; - return; - } - if (mCameraDevice == null) { - throw new RuntimeException("No camera to record from!"); - } - - if (mLogVerbose) Log.v(TAG, "Initializing filter framework and running the graph."); - initializeFilterFramework(); - - initializeEffect(true); - - mState = STATE_STARTING_PREVIEW; - invoke(mRunner, sGraphRunnerRun); - // Rest of preview startup handled in mSourceReadyCallback - } - - private Object invokeObjectEquals(Object proxy, Object[] args) { - return 
Boolean.valueOf(proxy == args[0]); - } - - private Object invokeObjectToString() { - return "Proxy-" + toString(); - } - - private void invokeOnLearningDone() { - if (mLogVerbose) Log.v(TAG, "Learning done callback triggered"); - // Called in a processing thread, so have to post message back to UI - // thread - sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING); - enable3ALocks(true); - } - - private void invokeOnRunnerDone(Object[] args) { - int runnerDoneResult = (Integer) args[0]; - synchronized (EffectsRecorder.this) { - if (mLogVerbose) { - Log.v(TAG, - "Graph runner done (" + EffectsRecorder.this - + ", mRunner " + mRunner - + ", mOldRunner " + mOldRunner + ")"); - } - if (runnerDoneResult == - (Integer) getConstant(sClsGraphRunner, "RESULT_ERROR")) { - // Handle error case - Log.e(TAG, "Error running filter graph!"); - Exception e = null; - if (mRunner != null) { - e = (Exception) invoke(mRunner, sGraphRunnerGetError); - } else if (mOldRunner != null) { - e = (Exception) invoke(mOldRunner, sGraphRunnerGetError); - } - raiseError(e); - } - if (mOldRunner != null) { - // Tear down old graph if available - if (mLogVerbose) Log.v(TAG, "Tearing down old graph."); - Object glEnv = getContextGLEnvironment(mGraphEnv); - if (glEnv != null && !(Boolean) invoke(glEnv, sGLEnvironmentIsActive)) { - invoke(glEnv, sGLEnvironmentActivate); - } - getGraphTearDown(mOldRunner, - invoke(mGraphEnv, sGraphEnvironmentGetContext)); - if (glEnv != null && (Boolean) invoke(glEnv, sGLEnvironmentIsActive)) { - invoke(glEnv, sGLEnvironmentDeactivate); - } - mOldRunner = null; - } - if (mState == STATE_PREVIEW || - mState == STATE_STARTING_PREVIEW) { - // Switching effects, start up the new runner - if (mLogVerbose) { - Log.v(TAG, "Previous effect halted. Running graph again. state: " - + mState); - } - tryEnable3ALocks(false); - // In case of an error, the graph restarts from beginning and in case - // of the BACKDROPPER effect, the learner re-learns the background. 
- // Hence, we need to show the learning dialogue to the user - // to avoid recording before the learning is done. Else, the user - // could start recording before the learning is done and the new - // background comes up later leading to an end result video - // with a heterogeneous background. - // For BACKDROPPER effect, this path is also executed sometimes at - // the end of a normal recording session. In such a case, the graph - // does not restart and hence the learner does not re-learn. So we - // do not want to show the learning dialogue then. - if (runnerDoneResult == (Integer) getConstant( - sClsGraphRunner, "RESULT_ERROR") - && mCurrentEffect == EFFECT_BACKDROPPER) { - sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING); - } - invoke(mRunner, sGraphRunnerRun); - } else if (mState != STATE_RELEASED) { - // Shutting down effects - if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview"); - tryEnable3ALocks(false); - sendMessage(EFFECT_NONE, EFFECT_MSG_EFFECTS_STOPPED); - } else { - // STATE_RELEASED - camera will be/has been released as well, do nothing. - } - } - } - - private void invokeOnSurfaceTextureSourceReady(Object[] args) { - SurfaceTexture source = (SurfaceTexture) args[0]; - if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received"); - synchronized (EffectsRecorder.this) { - mTextureSource = source; - - if (mState == STATE_CONFIGURE) { - // Stop preview happened while the runner was doing startup tasks - // Since we haven't started anything up, don't do anything - // Rest of cleanup will happen in onRunnerDone - if (mLogVerbose) Log.v(TAG, "Ready callback: Already stopped, skipping."); - return; - } - if (mState == STATE_RELEASED) { - // EffectsRecorder has been released, so don't touch the camera device - // or anything else - if (mLogVerbose) Log.v(TAG, "Ready callback: Already released, skipping."); - return; - } - if (source == null) { - if (mLogVerbose) { - Log.v(TAG, "Ready callback: source null! 
Looks like graph was closed!"); - } - if (mState == STATE_PREVIEW || - mState == STATE_STARTING_PREVIEW || - mState == STATE_RECORD) { - // A null source here means the graph is shutting down - // unexpectedly, so we need to turn off preview before - // the surface texture goes away. - if (mLogVerbose) { - Log.v(TAG, "Ready callback: State: " + mState - + ". stopCameraPreview"); - } - - stopCameraPreview(); - } - return; - } - - // Lock AE/AWB to reduce transition flicker - tryEnable3ALocks(true); - - mCameraDevice.stopPreview(); - if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview"); - mCameraDevice.setPreviewTexture(mTextureSource); - - mCameraDevice.startPreview(); - - // Unlock AE/AWB after preview started - tryEnable3ALocks(false); - - mState = STATE_PREVIEW; - - if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete"); - - // Sending a message to listener that preview is complete - sendMessage(mCurrentEffect, EFFECT_MSG_PREVIEW_RUNNING); - } - } - - private void invokeOnRecordingDone() { - // Forward the callback to the VideoModule object (as an asynchronous event). 
- if (mLogVerbose) Log.v(TAG, "Recording done callback triggered"); - sendMessage(EFFECT_NONE, EFFECT_MSG_RECORDING_DONE); - } - - public synchronized void startRecording() { - if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")"); - - switch (mState) { - case STATE_RECORD: - throw new RuntimeException("Already recording, cannot begin anew!"); - case STATE_RELEASED: - throw new RuntimeException( - "startRecording called on an already released recorder!"); - default: - break; - } - - if ((mOutputFile == null) && (mFd == null)) { - throw new RuntimeException("No output file name or descriptor provided!"); - } - - if (mState == STATE_CONFIGURE) { - startPreview(); - } - - Object recorder = getGraphFilter(mRunner, "recorder"); - if (mFd != null) { - setInputValue(recorder, "outputFileDescriptor", mFd); - } else { - setInputValue(recorder, "outputFile", mOutputFile); - } - // It is ok to set the audiosource without checking for timelapse here - // since that check will be done in the MediaEncoderFilter itself - setInputValue(recorder, "audioSource", MediaRecorder.AudioSource.CAMCORDER); - setInputValue(recorder, "recordingProfile", mProfile); - setInputValue(recorder, "orientationHint", mOrientationHint); - // Important to set the timelapseinterval to 0 if the capture rate is not >0 - // since the recorder does not get created every time the recording starts. 
- // The recorder infers whether the capture is timelapsed based on the value of - // this interval - boolean captureTimeLapse = mCaptureRate > 0; - if (captureTimeLapse) { - double timeBetweenFrameCapture = 1 / mCaptureRate; - setInputValue(recorder, "timelapseRecordingIntervalUs", - (long) (1000000 * timeBetweenFrameCapture)); - - } else { - setInputValue(recorder, "timelapseRecordingIntervalUs", 0L); - } - - if (mInfoListener != null) { - setInputValue(recorder, "infoListener", mInfoListener); - } - if (mErrorListener != null) { - setInputValue(recorder, "errorListener", mErrorListener); - } - setInputValue(recorder, "maxFileSize", mMaxFileSize); - setInputValue(recorder, "maxDurationMs", mMaxDurationMs); - setInputValue(recorder, "recording", true); - mSoundPlayer.play(SoundClips.START_VIDEO_RECORDING); - mState = STATE_RECORD; - } - - public synchronized void stopRecording() { - if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")"); - - switch (mState) { - case STATE_CONFIGURE: - case STATE_STARTING_PREVIEW: - case STATE_PREVIEW: - Log.w(TAG, "StopRecording called when recording not active!"); - return; - case STATE_RELEASED: - throw new RuntimeException("stopRecording called on released EffectsRecorder!"); - default: - break; - } - Object recorder = getGraphFilter(mRunner, "recorder"); - setInputValue(recorder, "recording", false); - mSoundPlayer.play(SoundClips.STOP_VIDEO_RECORDING); - mState = STATE_PREVIEW; - } - - // Called to tell the filter graph that the display surfacetexture is not valid anymore. - // So the filter graph should not hold any reference to the surface created with that. 
- public synchronized void disconnectDisplay() { - if (mLogVerbose) Log.v(TAG, "Disconnecting the graph from the " + - "SurfaceTexture"); - Object display = getGraphFilter(mRunner, "display"); - invoke(display, sSurfaceTextureTargetDisconnect, new Object[] { - invoke(mGraphEnv, sGraphEnvironmentGetContext)}); - } - - // The VideoModule will call this to notify that the camera is being - // released to the outside world. This call should happen after the - // stopRecording call. Else, the effects may throw an exception. - // With the recording stopped, the stopPreview call will not try to - // release the camera again. - // This must be called in onPause() if the effects are ON. - public synchronized void disconnectCamera() { - if (mLogVerbose) Log.v(TAG, "Disconnecting the effects from Camera"); - stopCameraPreview(); - mCameraDevice = null; - } - - // In a normal case, when the disconnect is not called, we should not - // set the camera device to null, since on return callback, we try to - // enable 3A locks, which need the cameradevice. - public synchronized void stopCameraPreview() { - if (mLogVerbose) Log.v(TAG, "Stopping camera preview."); - if (mCameraDevice == null) { - Log.d(TAG, "Camera already null. Nothing to disconnect"); - return; - } - mCameraDevice.stopPreview(); - mCameraDevice.setPreviewTexture(null); - } - - // Stop and release effect resources - public synchronized void stopPreview() { - if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")"); - switch (mState) { - case STATE_CONFIGURE: - Log.w(TAG, "StopPreview called when preview not active!"); - return; - case STATE_RELEASED: - throw new RuntimeException("stopPreview called on released EffectsRecorder!"); - default: - break; - } - - if (mState == STATE_RECORD) { - stopRecording(); - } - - mCurrentEffect = EFFECT_NONE; - - // This will not do anything if the camera has already been disconnected. 
- stopCameraPreview(); - - mState = STATE_CONFIGURE; - mOldRunner = mRunner; - invoke(mRunner, sGraphRunnerStop); - mRunner = null; - // Rest of stop and release handled in mRunnerDoneCallback - } - - // Try to enable/disable video stabilization if supported; otherwise return false - // It is called from a synchronized block. - boolean tryEnableVideoStabilization(boolean toggle) { - if (mLogVerbose) Log.v(TAG, "tryEnableVideoStabilization."); - if (mCameraDevice == null) { - Log.d(TAG, "Camera already null. Not enabling video stabilization."); - return false; - } - Camera.Parameters params = mCameraDevice.getParameters(); - - String vstabSupported = params.get("video-stabilization-supported"); - if ("true".equals(vstabSupported)) { - if (mLogVerbose) Log.v(TAG, "Setting video stabilization to " + toggle); - params.set("video-stabilization", toggle ? "true" : "false"); - mCameraDevice.setParameters(params); - return true; - } - if (mLogVerbose) Log.v(TAG, "Video stabilization not supported"); - return false; - } - - // Try to enable/disable 3A locks if supported; otherwise return false - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) - synchronized boolean tryEnable3ALocks(boolean toggle) { - if (mLogVerbose) Log.v(TAG, "tryEnable3ALocks"); - if (mCameraDevice == null) { - Log.d(TAG, "Camera already null. 
Not tryenabling 3A locks."); - return false; - } - Camera.Parameters params = mCameraDevice.getParameters(); - if (CameraUtil.isAutoExposureLockSupported(params) && - CameraUtil.isAutoWhiteBalanceLockSupported(params)) { - params.setAutoExposureLock(toggle); - params.setAutoWhiteBalanceLock(toggle); - mCameraDevice.setParameters(params); - return true; - } - return false; - } - - // Try to enable/disable 3A locks if supported; otherwise, throw error - // Use this when locks are essential to success - synchronized void enable3ALocks(boolean toggle) { - if (mLogVerbose) Log.v(TAG, "Enable3ALocks"); - if (mCameraDevice == null) { - Log.d(TAG, "Camera already null. Not enabling 3A locks."); - return; - } - Camera.Parameters params = mCameraDevice.getParameters(); - if (!tryEnable3ALocks(toggle)) { - throw new RuntimeException("Attempt to lock 3A on camera with no locking support!"); - } - } - - static class SerializableInvocationHandler - implements InvocationHandler, Serializable { - private final int mEffectsRecorderIndex; - public SerializableInvocationHandler(int index) { - mEffectsRecorderIndex = index; - } - - @Override - public Object invoke(Object proxy, Method method, Object[] args) - throws Throwable { - if (sEffectsRecorder == null) return null; - if (mEffectsRecorderIndex != sEffectsRecorderIndex) { - Log.v(TAG, "Ignore old callback " + mEffectsRecorderIndex); - return null; - } - if (method.equals(sObjectEquals)) { - return sEffectsRecorder.invokeObjectEquals(proxy, args); - } else if (method.equals(sObjectToString)) { - return sEffectsRecorder.invokeObjectToString(); - } else if (method.equals(sLearningDoneListenerOnLearningDone)) { - sEffectsRecorder.invokeOnLearningDone(); - } else if (method.equals(sOnRunnerDoneListenerOnRunnerDone)) { - sEffectsRecorder.invokeOnRunnerDone(args); - } else if (method.equals( - sSurfaceTextureSourceListenerOnSurfaceTextureSourceReady)) { - sEffectsRecorder.invokeOnSurfaceTextureSourceReady(args); - } else if 
(method.equals(sOnRecordingDoneListenerOnRecordingDone)) { - sEffectsRecorder.invokeOnRecordingDone(); - } - return null; - } - } - - // Indicates that all camera/recording activity needs to halt - public synchronized void release() { - if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")"); - - switch (mState) { - case STATE_RECORD: - case STATE_STARTING_PREVIEW: - case STATE_PREVIEW: - stopPreview(); - // Fall-through - default: - if (mSoundPlayer != null) { - mSoundPlayer.release(); - mSoundPlayer = null; - } - mState = STATE_RELEASED; - break; - } - sEffectsRecorder = null; - } - - private void sendMessage(final int effect, final int msg) { - if (mEffectsListener != null) { - mHandler.post(new Runnable() { - @Override - public void run() { - mEffectsListener.onEffectsUpdate(effect, msg); - } - }); - } - } - - private void raiseError(final Exception exception) { - if (mEffectsListener != null) { - mHandler.post(new Runnable() { - @Override - public void run() { - if (mFd != null) { - mEffectsListener.onEffectsError(exception, null); - } else { - mEffectsListener.onEffectsError(exception, mOutputFile); - } - } - }); - } - } - - // invoke method on receiver with no arguments - private Object invoke(Object receiver, Method method) { - try { - return method.invoke(receiver); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - } - - // invoke method on receiver with arguments - private Object invoke(Object receiver, Method method, Object[] args) { - try { - return method.invoke(receiver, args); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - } - - private void setInputValue(Object receiver, String key, Object value) { - try { - sFilterSetInputValue.invoke(receiver, new Object[] {key, value}); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - } - - private Object newInstance(Constructor<?> ct, Object[] initArgs) { - try { - return ct.newInstance(initArgs); - } catch (Exception ex) { - throw new RuntimeException(ex); 
- } - } - - private Object newInstance(Constructor<?> ct) { - try { - return ct.newInstance(); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - } - - private Object getGraphFilter(Object receiver, String name) { - try { - return sFilterGraphGetFilter.invoke(sGraphRunnerGetGraph - .invoke(receiver), new Object[] {name}); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - } - - private Object getContextGLEnvironment(Object receiver) { - try { - return sFilterContextGetGLEnvironment - .invoke(sGraphEnvironmentGetContext.invoke(receiver)); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - } - - private void getGraphTearDown(Object receiver, Object filterContext) { - try { - sFilterGraphTearDown.invoke(sGraphRunnerGetGraph.invoke(receiver), - new Object[]{filterContext}); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - } - - private Object getConstant(Class<?> cls, String name) { - try { - return cls.getDeclaredField(name).get(null); - } catch (Exception ex) { - throw new RuntimeException(ex); - } - } -} diff --git a/src/com/android/camera/FocusOverlayManager.java b/src/com/android/camera/FocusOverlayManager.java index d3215e7a6..a41959b08 100644 --- a/src/com/android/camera/FocusOverlayManager.java +++ b/src/com/android/camera/FocusOverlayManager.java @@ -16,23 +16,23 @@ package com.android.camera; -import java.util.ArrayList; -import java.util.List; - import android.annotation.TargetApi; import android.graphics.Matrix; import android.graphics.Rect; import android.graphics.RectF; import android.hardware.Camera.Area; import android.hardware.Camera.Parameters; +import android.os.Build; import android.os.Handler; import android.os.Looper; import android.os.Message; import android.util.Log; -import com.android.camera.util.ApiHelper; import com.android.camera.util.CameraUtil; +import java.util.ArrayList; +import java.util.List; + /* A class that handles everything about focus in still picture mode. 
* This also handles the metering area because it is the same as focus area. * @@ -316,7 +316,7 @@ public class FocusOverlayManager { mPreviousMoving = moving; } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) + @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) private void initializeFocusAreas(int x, int y) { if (mFocusArea == null) { mFocusArea = new ArrayList<Object>(); @@ -327,7 +327,7 @@ public class FocusOverlayManager { calculateTapArea(x, y, 1f, ((Area) mFocusArea.get(0)).rect); } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) + @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) private void initializeMeteringAreas(int x, int y) { if (mMeteringArea == null) { mMeteringArea = new ArrayList<Object>(); diff --git a/src/com/android/camera/MosaicPreviewRenderer.java b/src/com/android/camera/MosaicPreviewRenderer.java index e8c02db24..42da4d9e7 100644 --- a/src/com/android/camera/MosaicPreviewRenderer.java +++ b/src/com/android/camera/MosaicPreviewRenderer.java @@ -16,7 +16,6 @@ package com.android.camera; -import android.annotation.TargetApi; import android.graphics.SurfaceTexture; import android.os.ConditionVariable; import android.os.Handler; @@ -24,11 +23,8 @@ import android.os.HandlerThread; import android.os.Looper; import android.os.Message; -import com.android.camera.util.ApiHelper; - import javax.microedition.khronos.opengles.GL10; -@TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) // uses SurfaceTexture public class MosaicPreviewRenderer { @SuppressWarnings("unused") @@ -117,11 +113,8 @@ public class MosaicPreviewRenderer { mEglThread.quit(); } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) private void releaseSurfaceTexture(SurfaceTexture st) { - if (ApiHelper.HAS_RELEASE_SURFACE_TEXTURE) { - st.release(); - } + st.release(); } // Should be called from other thread. 
diff --git a/src/com/android/camera/PhotoModule.java b/src/com/android/camera/PhotoModule.java index e3bdc25fe..861dead2d 100644 --- a/src/com/android/camera/PhotoModule.java +++ b/src/com/android/camera/PhotoModule.java @@ -38,6 +38,7 @@ import android.hardware.SensorManager; import android.location.Location; import android.media.CameraProfile; import android.net.Uri; +import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.os.Looper; @@ -481,11 +482,9 @@ public class PhotoModule mZoomValue = 0; openCameraCommon(); - if (ApiHelper.HAS_SURFACE_TEXTURE) { - // Start switch camera animation. Post a message because - // onFrameAvailable from the old camera may already exist. - mHandler.sendEmptyMessage(SWITCH_CAMERA_START_ANIMATION); - } + // Start switch camera animation. Post a message because + // onFrameAvailable from the old camera may already exist. + mHandler.sendEmptyMessage(SWITCH_CAMERA_START_ANIMATION); } protected void setCameraId(int cameraId) { @@ -594,10 +593,8 @@ public class PhotoModule }); } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) @Override public void startFaceDetection() { - if (!ApiHelper.HAS_FACE_DETECTION) return; if (mFaceDetectionStarted) return; if (mParameters.getMaxNumDetectedFaces() > 0) { mFaceDetectionStarted = true; @@ -609,10 +606,8 @@ public class PhotoModule } } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) @Override public void stopFaceDetection() { - if (!ApiHelper.HAS_FACE_DETECTION) return; if (!mFaceDetectionStarted) return; if (mParameters.getMaxNumDetectedFaces() > 0) { mFaceDetectionStarted = false; @@ -710,14 +705,7 @@ public class PhotoModule mFocusManager.updateFocusUI(); // Ensure focus indicator is hidden. if (!mIsImageCaptureIntent) { - if (ApiHelper.CAN_START_PREVIEW_IN_JPEG_CALLBACK) { - setupPreview(); - } else { - // Camera HAL of some devices have a bug. Starting preview - // immediately after taking a picture will fail. 
Wait some - // time before starting the preview. - mHandler.sendEmptyMessageDelayed(SETUP_PREVIEW, 300); - } + setupPreview(); } ExifInterface exif = Exif.getExif(jpegData); @@ -792,7 +780,7 @@ public class PhotoModule } } - @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN) + @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private final class AutoFocusMoveCallback implements CameraAFMoveCallback { @Override @@ -1446,13 +1434,10 @@ public class PhotoModule return false; } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) private void closeCamera() { if (mCameraDevice != null) { mCameraDevice.setZoomChangeListener(null); - if(ApiHelper.HAS_FACE_DETECTION) { - mCameraDevice.setFaceDetectionCallback(null, null); - } + mCameraDevice.setFaceDetectionCallback(null, null); mCameraDevice.setErrorCallback(null); CameraHolder.instance().release(); mFaceDetectionStarted = false; @@ -1566,28 +1551,26 @@ public class PhotoModule } } - @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN) + @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private void setAutoExposureLockIfSupported() { if (mAeLockSupported) { mParameters.setAutoExposureLock(mFocusManager.getAeAwbLock()); } } - @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN) + @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private void setAutoWhiteBalanceLockIfSupported() { if (mAwbLockSupported) { mParameters.setAutoWhiteBalanceLock(mFocusManager.getAeAwbLock()); } } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) private void setFocusAreasIfSupported() { if (mFocusAreaSupported) { mParameters.setFocusAreas(mFocusManager.getFocusAreas()); } } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) private void setMeteringAreasIfSupported() { if (mMeteringAreaSupported) { // Use the same area for focus and metering. 
@@ -1723,7 +1706,7 @@ public class PhotoModule } } - @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN) + @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private void updateAutoFocusMoveCallback() { if (mParameters.getFocusMode().equals(CameraUtil.FOCUS_MODE_CONTINUOUS_PICTURE)) { mCameraDevice.setAutoFocusMoveCallback(mHandler, diff --git a/src/com/android/camera/PhotoUI.java b/src/com/android/camera/PhotoUI.java index 41847ed1c..3f1a27ddf 100644 --- a/src/com/android/camera/PhotoUI.java +++ b/src/com/android/camera/PhotoUI.java @@ -42,8 +42,6 @@ import android.widget.Toast; import com.android.camera.CameraPreference.OnPreferenceChangedListener; import com.android.camera.FocusOverlayManager.FocusUI; -import com.android.camera.ui.ModuleSwitcher; -import com.android.camera.util.ApiHelper; import com.android.camera.ui.AbstractSettingPopup; import com.android.camera.ui.CameraControls; import com.android.camera.ui.CameraRootView; @@ -51,6 +49,7 @@ import com.android.camera.ui.CountDownView; import com.android.camera.ui.CountDownView.OnCountDownFinishedListener; import com.android.camera.ui.FaceView; import com.android.camera.ui.FocusIndicator; +import com.android.camera.ui.ModuleSwitcher; import com.android.camera.ui.PieRenderer; import com.android.camera.ui.PieRenderer.PieListener; import com.android.camera.ui.RenderOverlay; @@ -214,14 +213,12 @@ public class PhotoUI implements PieListener, mSwitcher.setCurrentIndex(ModuleSwitcher.PHOTO_MODULE_INDEX); mSwitcher.setSwitchListener(mActivity); mMenuButton = mRootView.findViewById(R.id.menu); - if (ApiHelper.HAS_FACE_DETECTION) { - ViewStub faceViewStub = (ViewStub) mRootView - .findViewById(R.id.face_view_stub); - if (faceViewStub != null) { - faceViewStub.inflate(); - mFaceView = (FaceView) mRootView.findViewById(R.id.face_view); - setSurfaceTextureSizeChangedListener(mFaceView); - } + ViewStub faceViewStub = (ViewStub) mRootView + .findViewById(R.id.face_view_stub); + if (faceViewStub != null) { + faceViewStub.inflate(); + 
mFaceView = (FaceView) mRootView.findViewById(R.id.face_view); + setSurfaceTextureSizeChangedListener(mFaceView); } mCameraControls = (CameraControls) mRootView.findViewById(R.id.camera_controls); ((CameraRootView) mRootView).setDisplayChangeListener(this); diff --git a/src/com/android/camera/SoundClips.java b/src/com/android/camera/SoundClips.java index f981bd238..3130aac68 100644 --- a/src/com/android/camera/SoundClips.java +++ b/src/com/android/camera/SoundClips.java @@ -21,6 +21,7 @@ import android.content.Context; import android.media.AudioManager; import android.media.MediaActionSound; import android.media.SoundPool; +import android.os.Build; import android.util.Log; import com.android.camera2.R; @@ -49,6 +50,7 @@ public class SoundClips { } public static int getAudioTypeForSoundPool() { + // STREAM_SYSTEM_ENFORCED is hidden API. return ApiHelper.getIntFieldIfExists(AudioManager.class, "STREAM_SYSTEM_ENFORCED", null, AudioManager.STREAM_RING); } @@ -57,7 +59,7 @@ public class SoundClips { * This class implements SoundClips.Player using MediaActionSound, * which exists since API level 16. 
*/ - @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN) + @TargetApi(Build.VERSION_CODES.JELLY_BEAN) private static class MediaActionSoundPlayer implements Player { private static final String TAG = "MediaActionSoundPlayer"; private MediaActionSound mSound; diff --git a/src/com/android/camera/VideoModule.java b/src/com/android/camera/VideoModule.java index b30905fb8..d6c88576d 100644 --- a/src/com/android/camera/VideoModule.java +++ b/src/com/android/camera/VideoModule.java @@ -57,10 +57,10 @@ import com.android.camera.CameraManager.CameraPictureCallback; import com.android.camera.CameraManager.CameraProxy; import com.android.camera.app.OrientationManager; import com.android.camera.exif.ExifInterface; -import com.android.camera.util.ApiHelper; -import com.android.camera.util.AccessibilityUtils; import com.android.camera.ui.PopupManager; import com.android.camera.ui.RotateTextToast; +import com.android.camera.util.AccessibilityUtils; +import com.android.camera.util.ApiHelper; import com.android.camera.util.CameraUtil; import com.android.camera.util.UsageStatistics; import com.android.camera2.R; @@ -174,9 +174,6 @@ public class VideoModule implements CameraModule, private int mZoomValue; // The current zoom value. - private boolean mRestoreFlash; // This is used to check if we need to restore the flash - // status when going back from gallery. - private final MediaSaveService.OnMediaSavedListener mOnVideoSavedListener = new MediaSaveService.OnMediaSavedListener() { @Override @@ -588,13 +585,11 @@ public class VideoModule implements CameraModule, } // Read time lapse recording interval. 
- if (ApiHelper.HAS_TIME_LAPSE_RECORDING) { - String frameIntervalStr = mPreferences.getString( - CameraSettings.KEY_VIDEO_TIME_LAPSE_FRAME_INTERVAL, - mActivity.getString(R.string.pref_video_time_lapse_frame_interval_default)); - mTimeBetweenTimeLapseFrameCaptureMs = Integer.parseInt(frameIntervalStr); - mCaptureTimeLapse = (mTimeBetweenTimeLapseFrameCaptureMs != 0); - } + String frameIntervalStr = mPreferences.getString( + CameraSettings.KEY_VIDEO_TIME_LAPSE_FRAME_INTERVAL, + mActivity.getString(R.string.pref_video_time_lapse_frame_interval_default)); + mTimeBetweenTimeLapseFrameCaptureMs = Integer.parseInt(frameIntervalStr); + mCaptureTimeLapse = (mTimeBetweenTimeLapseFrameCaptureMs != 0); // TODO: This should be checked instead directly +1000. if (mCaptureTimeLapse) quality += 1000; mProfile = CamcorderProfile.get(mCameraId, quality); @@ -602,33 +597,28 @@ public class VideoModule implements CameraModule, mPreferenceRead = true; } - @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) + @TargetApi(Build.VERSION_CODES.HONEYCOMB) private void getDesiredPreviewSize() { mParameters = mCameraDevice.getParameters(); - if (ApiHelper.HAS_GET_SUPPORTED_VIDEO_SIZE) { - if (mParameters.getSupportedVideoSizes() == null) { - mDesiredPreviewWidth = mProfile.videoFrameWidth; - mDesiredPreviewHeight = mProfile.videoFrameHeight; - } else { // Driver supports separates outputs for preview and video. - List<Size> sizes = mParameters.getSupportedPreviewSizes(); - Size preferred = mParameters.getPreferredPreviewSizeForVideo(); - int product = preferred.width * preferred.height; - Iterator<Size> it = sizes.iterator(); - // Remove the preview sizes that are not preferred. 
- while (it.hasNext()) { - Size size = it.next(); - if (size.width * size.height > product) { - it.remove(); - } - } - Size optimalSize = CameraUtil.getOptimalPreviewSize(mActivity, sizes, - (double) mProfile.videoFrameWidth / mProfile.videoFrameHeight); - mDesiredPreviewWidth = optimalSize.width; - mDesiredPreviewHeight = optimalSize.height; - } - } else { + if (mParameters.getSupportedVideoSizes() == null) { mDesiredPreviewWidth = mProfile.videoFrameWidth; mDesiredPreviewHeight = mProfile.videoFrameHeight; + } else { // Driver supports separates outputs for preview and video. + List<Size> sizes = mParameters.getSupportedPreviewSizes(); + Size preferred = mParameters.getPreferredPreviewSizeForVideo(); + int product = preferred.width * preferred.height; + Iterator<Size> it = sizes.iterator(); + // Remove the preview sizes that are not preferred. + while (it.hasNext()) { + Size size = it.next(); + if (size.width * size.height > product) { + it.remove(); + } + } + Size optimalSize = CameraUtil.getOptimalPreviewSize(mActivity, sizes, + (double) mProfile.videoFrameWidth / mProfile.videoFrameHeight); + mDesiredPreviewWidth = optimalSize.width; + mDesiredPreviewHeight = optimalSize.height; } mUI.setPreviewSize(mDesiredPreviewWidth, mDesiredPreviewHeight); Log.v(TAG, "mDesiredPreviewWidth=" + mDesiredPreviewWidth + @@ -1053,19 +1043,15 @@ public class VideoModule implements CameraModule, mMediaRecorder.setOnInfoListener(this); } - @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) private static void setCaptureRate(MediaRecorder recorder, double fps) { recorder.setCaptureRate(fps); } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) private void setRecordLocation() { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) { - Location loc = mLocationManager.getCurrentLocation(); - if (loc != null) { - mMediaRecorder.setLocation((float) loc.getLatitude(), - (float) loc.getLongitude()); - } + Location loc = mLocationManager.getCurrentLocation(); + if (loc 
!= null) { + mMediaRecorder.setLocation((float) loc.getLatitude(), + (float) loc.getLongitude()); } } @@ -1235,9 +1221,7 @@ public class VideoModule implements CameraModule, // recording. We need to alter the parameters if we support camcorder // zoom. To reduce latency when setting the parameters during zoom, we // update mParameters here once. - if (ApiHelper.HAS_ZOOM_WHEN_RECORDING) { - mParameters = mCameraDevice.getParameters(); - } + mParameters = mCameraDevice.getParameters(); mUI.enableCameraControls(false); @@ -1660,20 +1644,6 @@ public class VideoModule implements CameraModule, public void onCaptureTextureCopied() { } - // Verifies that the current preview view size is correct before starting - // preview. If not, resets the surface texture and resizes the view. - private void checkQualityAndStartPreview() { - readVideoPreferences(); - mUI.showTimeLapseUI(mCaptureTimeLapse); - Size size = mParameters.getPreviewSize(); - if (size.width != mDesiredPreviewWidth - || size.height != mDesiredPreviewHeight) { - resizeForPreviewAspectRatio(); - } - // Start up preview again - startPreview(); - } - private void initializeVideoSnapshot() { if (mParameters == null) return; if (CameraUtil.isVideoSnapshotSupported(mParameters) && !mIsVideoCaptureIntent) { diff --git a/src/com/android/camera/VideoUI.java b/src/com/android/camera/VideoUI.java index 204195f44..464cf0ee8 100644 --- a/src/com/android/camera/VideoUI.java +++ b/src/com/android/camera/VideoUI.java @@ -16,8 +16,6 @@ package com.android.camera; -import java.util.List; - import android.graphics.Bitmap; import android.graphics.Color; import android.graphics.Matrix; @@ -53,7 +51,8 @@ import com.android.camera.ui.RotateLayout; import com.android.camera.ui.ZoomRenderer; import com.android.camera.util.CameraUtil; import com.android.camera2.R; -import com.android.camera.util.ApiHelper; + +import java.util.List; public class VideoUI implements PieRenderer.PieListener, PreviewGestures.SingleTapListener, @@ -559,23 
+558,12 @@ public class VideoUI implements PieRenderer.PieListener, hideSwitcher(); mRecordingTimeView.setText(""); mRecordingTimeView.setVisibility(View.VISIBLE); - // The camera is not allowed to be accessed in older api levels during - // recording. It is therefore necessary to hide the zoom UI on older - // platforms. - // See the documentation of android.media.MediaRecorder.start() for - // further explanation. - if (!ApiHelper.HAS_ZOOM_WHEN_RECORDING && zoomSupported) { - // TODO: disable zoom UI here. - } } else { mShutterButton.setImageResource(R.drawable.btn_new_shutter_video); if (!mController.isVideoCaptureIntent()) { showSwitcher(); } mRecordingTimeView.setVisibility(View.GONE); - if (!ApiHelper.HAS_ZOOM_WHEN_RECORDING && zoomSupported) { - // TODO: enable zoom UI here. - } } } diff --git a/src/com/android/camera/ui/CameraRootView.java b/src/com/android/camera/ui/CameraRootView.java index 49a157568..35a585e92 100644 --- a/src/com/android/camera/ui/CameraRootView.java +++ b/src/com/android/camera/ui/CameraRootView.java @@ -43,9 +43,6 @@ public class CameraRootView extends FrameLayout { private Object mDisplayListener; private MyDisplayListener mListener; - // Hideybars are available on K and up. 
- private static final boolean HIDEYBARS_ENABLED = CameraActivity.isKitKatOrHigher(); - public interface MyDisplayListener { public void onDisplayChanged(); } @@ -59,7 +56,7 @@ public class CameraRootView extends FrameLayout { @Override protected boolean fitSystemWindows(Rect insets) { - if (!HIDEYBARS_ENABLED) { + if (!ApiHelper.HAS_HIDEYBARS) { mCurrentInsets = insets; // insets include status bar, navigation bar, etc // In this case, we are only concerned with the size of nav bar diff --git a/src/com/android/camera/ui/EffectSettingPopup.java b/src/com/android/camera/ui/EffectSettingPopup.java deleted file mode 100644 index 1ee278d21..000000000 --- a/src/com/android/camera/ui/EffectSettingPopup.java +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright (C) 2010 The Android Open Source Project - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package com.android.camera.ui; - -import java.util.ArrayList; -import java.util.HashMap; - -import android.annotation.TargetApi; -import android.content.Context; -import android.util.AttributeSet; -import android.util.Log; -import android.view.View; -import android.widget.AdapterView; -import android.widget.GridView; -import android.widget.SimpleAdapter; - -import com.android.camera.IconListPreference; -import com.android.camera2.R; -import com.android.camera.util.ApiHelper; - -// A popup window that shows video effect setting. It has two grid view. -// One shows the goofy face effects. 
The other shows the background replacer -// effects. -public class EffectSettingPopup extends AbstractSettingPopup implements - AdapterView.OnItemClickListener, View.OnClickListener { - private static final String TAG = "EffectSettingPopup"; - private String mNoEffect; - private IconListPreference mPreference; - private Listener mListener; - private View mClearEffects; - private GridView mSillyFacesGrid; - private GridView mBackgroundGrid; - - // Data for silly face items. (text, image, and preference value) - ArrayList<HashMap<String, Object>> mSillyFacesItem = - new ArrayList<HashMap<String, Object>>(); - - // Data for background replacer items. (text, image, and preference value) - ArrayList<HashMap<String, Object>> mBackgroundItem = - new ArrayList<HashMap<String, Object>>(); - - - static public interface Listener { - public void onSettingChanged(); - } - - public EffectSettingPopup(Context context, AttributeSet attrs) { - super(context, attrs); - mNoEffect = context.getString(R.string.pref_video_effect_default); - } - - @Override - protected void onFinishInflate() { - super.onFinishInflate(); - mClearEffects = findViewById(R.id.clear_effects); - mClearEffects.setOnClickListener(this); - mSillyFacesGrid = (GridView) findViewById(R.id.effect_silly_faces); - mBackgroundGrid = (GridView) findViewById(R.id.effect_background); - } - - public void initialize(IconListPreference preference) { - mPreference = preference; - Context context = getContext(); - CharSequence[] entries = mPreference.getEntries(); - CharSequence[] entryValues = mPreference.getEntryValues(); - int[] iconIds = mPreference.getImageIds(); - if (iconIds == null) { - iconIds = mPreference.getLargeIconIds(); - } - - // Set title. - mTitle.setText(mPreference.getTitle()); - - for(int i = 0; i < entries.length; ++i) { - String value = entryValues[i].toString(); - if (value.equals(mNoEffect)) continue; // no effect, skip it. 
- HashMap<String, Object> map = new HashMap<String, Object>(); - map.put("value", value); - map.put("text", entries[i].toString()); - if (iconIds != null) map.put("image", iconIds[i]); - if (value.startsWith("goofy_face")) { - mSillyFacesItem.add(map); - } else if (value.startsWith("backdropper")) { - mBackgroundItem.add(map); - } - } - - boolean hasSillyFaces = mSillyFacesItem.size() > 0; - boolean hasBackground = mBackgroundItem.size() > 0; - - // Initialize goofy face if it is supported. - if (hasSillyFaces) { - findViewById(R.id.effect_silly_faces_title).setVisibility(View.VISIBLE); - findViewById(R.id.effect_silly_faces_title_separator).setVisibility(View.VISIBLE); - mSillyFacesGrid.setVisibility(View.VISIBLE); - SimpleAdapter sillyFacesItemAdapter = new SimpleAdapter(context, - mSillyFacesItem, R.layout.effect_setting_item, - new String[] {"text", "image"}, - new int[] {R.id.text, R.id.image}); - mSillyFacesGrid.setAdapter(sillyFacesItemAdapter); - mSillyFacesGrid.setOnItemClickListener(this); - } - - if (hasSillyFaces && hasBackground) { - findViewById(R.id.effect_background_separator).setVisibility(View.VISIBLE); - } - - // Initialize background replacer if it is supported. - if (hasBackground) { - findViewById(R.id.effect_background_title).setVisibility(View.VISIBLE); - findViewById(R.id.effect_background_title_separator).setVisibility(View.VISIBLE); - mBackgroundGrid.setVisibility(View.VISIBLE); - SimpleAdapter backgroundItemAdapter = new SimpleAdapter(context, - mBackgroundItem, R.layout.effect_setting_item, - new String[] {"text", "image"}, - new int[] {R.id.text, R.id.image}); - mBackgroundGrid.setAdapter(backgroundItemAdapter); - mBackgroundGrid.setOnItemClickListener(this); - } - - reloadPreference(); - } - - @Override - public void setVisibility(int visibility) { - if (visibility == View.VISIBLE) { - if (getVisibility() != View.VISIBLE) { - // Do not show or hide "Clear effects" button when the popup - // is already visible. 
Otherwise it looks strange. - boolean noEffect = mPreference.getValue().equals(mNoEffect); - mClearEffects.setVisibility(noEffect ? View.GONE : View.VISIBLE); - } - reloadPreference(); - } - super.setVisibility(visibility); - } - - // The value of the preference may have changed. Update the UI. - @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) - @Override - public void reloadPreference() { - mBackgroundGrid.setItemChecked(mBackgroundGrid.getCheckedItemPosition(), false); - mSillyFacesGrid.setItemChecked(mSillyFacesGrid.getCheckedItemPosition(), false); - - String value = mPreference.getValue(); - if (value.equals(mNoEffect)) return; - - for (int i = 0; i < mSillyFacesItem.size(); i++) { - if (value.equals(mSillyFacesItem.get(i).get("value"))) { - mSillyFacesGrid.setItemChecked(i, true); - return; - } - } - - for (int i = 0; i < mBackgroundItem.size(); i++) { - if (value.equals(mBackgroundItem.get(i).get("value"))) { - mBackgroundGrid.setItemChecked(i, true); - return; - } - } - - Log.e(TAG, "Invalid preference value: " + value); - mPreference.print(); - } - - public void setSettingChangedListener(Listener listener) { - mListener = listener; - } - - @Override - public void onItemClick(AdapterView<?> parent, View view, - int index, long id) { - String value; - if (parent == mSillyFacesGrid) { - value = (String) mSillyFacesItem.get(index).get("value"); - } else if (parent == mBackgroundGrid) { - value = (String) mBackgroundItem.get(index).get("value"); - } else { - return; - } - - // Tapping the selected effect will deselect it (clear effects). - if (value.equals(mPreference.getValue())) { - mPreference.setValue(mNoEffect); - } else { - mPreference.setValue(value); - } - reloadPreference(); - if (mListener != null) mListener.onSettingChanged(); - } - - @Override - public void onClick(View v) { - // Clear the effect. 
- mPreference.setValue(mNoEffect); - reloadPreference(); - if (mListener != null) mListener.onSettingChanged(); - } -} diff --git a/src/com/android/camera/ui/FaceView.java b/src/com/android/camera/ui/FaceView.java index 7ec9b7e54..1b3a9c72e 100644 --- a/src/com/android/camera/ui/FaceView.java +++ b/src/com/android/camera/ui/FaceView.java @@ -16,7 +16,6 @@ package com.android.camera.ui; -import android.annotation.TargetApi; import android.content.Context; import android.content.res.Resources; import android.graphics.Canvas; @@ -31,12 +30,10 @@ import android.util.AttributeSet; import android.util.Log; import android.view.View; -import com.android.camera.util.CameraUtil; import com.android.camera.PhotoUI; +import com.android.camera.util.CameraUtil; import com.android.camera2.R; -import com.android.camera.util.ApiHelper; -@TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) public class FaceView extends View implements FocusIndicator, Rotatable, PhotoUI.SurfaceTextureSizeChangedListener { diff --git a/src/com/android/camera/ui/ModuleSwitcher.java b/src/com/android/camera/ui/ModuleSwitcher.java index 5eb316c7f..69ae3b57e 100644 --- a/src/com/android/camera/ui/ModuleSwitcher.java +++ b/src/com/android/camera/ui/ModuleSwitcher.java @@ -34,7 +34,6 @@ import android.view.ViewGroup; import android.widget.FrameLayout.LayoutParams; import android.widget.LinearLayout; -import com.android.camera.util.ApiHelper; import com.android.camera.util.CameraUtil; import com.android.camera.util.PhotoSphereHelper; import com.android.camera.util.UsageStatistics; @@ -317,9 +316,6 @@ public class ModuleSwitcher extends RotateImageView } private void popupAnimationSetup() { - if (!ApiHelper.HAS_VIEW_PROPERTY_ANIMATOR) { - return; - } layoutPopup(); mPopup.setScaleX(0.3f); mPopup.setScaleY(0.3f); @@ -329,9 +325,6 @@ public class ModuleSwitcher extends RotateImageView } private boolean animateHidePopup() { - if (!ApiHelper.HAS_VIEW_PROPERTY_ANIMATOR) { - return false; - } if 
(mHideAnimationListener == null) { mHideAnimationListener = new AnimatorListenerAdapter() { @Override @@ -358,9 +351,6 @@ public class ModuleSwitcher extends RotateImageView } private boolean animateShowPopup() { - if (!ApiHelper.HAS_VIEW_PROPERTY_ANIMATOR) { - return false; - } if (mNeedsAnimationSetup) { popupAnimationSetup(); } diff --git a/src/com/android/camera/ui/RotateLayout.java b/src/com/android/camera/ui/RotateLayout.java index 044da1cee..8539eb64c 100644 --- a/src/com/android/camera/ui/RotateLayout.java +++ b/src/com/android/camera/ui/RotateLayout.java @@ -16,19 +16,11 @@ package com.android.camera.ui; -import android.annotation.TargetApi; import android.content.Context; -import android.graphics.Canvas; import android.graphics.Matrix; -import android.graphics.Rect; import android.util.AttributeSet; -import android.view.MotionEvent; import android.view.View; import android.view.ViewGroup; -import android.view.ViewParent; - -import com.android.camera.util.MotionEventHelper; -import com.android.camera.util.ApiHelper; // A RotateLayout is designed to display a single item and provides the // capabilities to rotate the item. 
@@ -48,14 +40,11 @@ public class RotateLayout extends ViewGroup implements Rotatable { setBackgroundResource(android.R.color.transparent); } - @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) @Override protected void onFinishInflate() { mChild = getChildAt(0); - if (ApiHelper.HAS_VIEW_TRANSFORM_PROPERTIES) { - mChild.setPivotX(0); - mChild.setPivotY(0); - } + mChild.setPivotX(0); + mChild.setPivotY(0); } @Override @@ -76,60 +65,6 @@ public class RotateLayout extends ViewGroup implements Rotatable { } @Override - public boolean dispatchTouchEvent(MotionEvent event) { - if (!ApiHelper.HAS_VIEW_TRANSFORM_PROPERTIES) { - final int w = getMeasuredWidth(); - final int h = getMeasuredHeight(); - switch (mOrientation) { - case 0: - mMatrix.setTranslate(0, 0); - break; - case 90: - mMatrix.setTranslate(0, -h); - break; - case 180: - mMatrix.setTranslate(-w, -h); - break; - case 270: - mMatrix.setTranslate(-w, 0); - break; - } - mMatrix.postRotate(mOrientation); - event = MotionEventHelper.transformEvent(event, mMatrix); - } - return super.dispatchTouchEvent(event); - } - - @Override - protected void dispatchDraw(Canvas canvas) { - if (ApiHelper.HAS_VIEW_TRANSFORM_PROPERTIES) { - super.dispatchDraw(canvas); - } else { - canvas.save(); - int w = getMeasuredWidth(); - int h = getMeasuredHeight(); - switch (mOrientation) { - case 0: - canvas.translate(0, 0); - break; - case 90: - canvas.translate(0, h); - break; - case 180: - canvas.translate(w, h); - break; - case 270: - canvas.translate(w, 0); - break; - } - canvas.rotate(-mOrientation, 0, 0); - super.dispatchDraw(canvas); - canvas.restore(); - } - } - - @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) - @Override protected void onMeasure(int widthSpec, int heightSpec) { int w = 0, h = 0; switch(mOrientation) { @@ -148,27 +83,25 @@ public class RotateLayout extends ViewGroup implements Rotatable { } setMeasuredDimension(w, h); - if (ApiHelper.HAS_VIEW_TRANSFORM_PROPERTIES) { - switch (mOrientation) { - case 0: - 
mChild.setTranslationX(0); - mChild.setTranslationY(0); - break; - case 90: - mChild.setTranslationX(0); - mChild.setTranslationY(h); - break; - case 180: - mChild.setTranslationX(w); - mChild.setTranslationY(h); - break; - case 270: - mChild.setTranslationX(w); - mChild.setTranslationY(0); - break; - } - mChild.setRotation(-mOrientation); + switch (mOrientation) { + case 0: + mChild.setTranslationX(0); + mChild.setTranslationY(0); + break; + case 90: + mChild.setTranslationX(0); + mChild.setTranslationY(h); + break; + case 180: + mChild.setTranslationX(w); + mChild.setTranslationY(h); + break; + case 270: + mChild.setTranslationX(w); + mChild.setTranslationY(0); + break; } + mChild.setRotation(-mOrientation); } @Override @@ -188,16 +121,4 @@ public class RotateLayout extends ViewGroup implements Rotatable { public int getOrientation() { return mOrientation; } - - @Override - public ViewParent invalidateChildInParent(int[] location, Rect r) { - if (!ApiHelper.HAS_VIEW_TRANSFORM_PROPERTIES && mOrientation != 0) { - // The workaround invalidates the entire rotate layout. After - // rotation, the correct area to invalidate may be larger than the - // size of the child. Ex: ListView. There is no way to invalidate - // only the necessary area. 
- r.set(0, 0, getWidth(), getHeight()); - } - return super.invalidateChildInParent(location, r); - } } diff --git a/src/com/android/camera/ui/Switch.java b/src/com/android/camera/ui/Switch.java index 4518dedf4..6c3399775 100644 --- a/src/com/android/camera/ui/Switch.java +++ b/src/com/android/camera/ui/Switch.java @@ -24,6 +24,7 @@ import android.graphics.Canvas; import android.graphics.Paint; import android.graphics.Rect; import android.graphics.drawable.Drawable; +import android.os.Build; import android.text.Layout; import android.text.StaticLayout; import android.text.TextPaint; @@ -39,7 +40,6 @@ import android.view.accessibility.AccessibilityNodeInfo; import android.widget.CompoundButton; import com.android.camera2.R; -import com.android.camera.util.ApiHelper; /** * A Switch is a two-state toggle switch widget that can select between two @@ -82,7 +82,6 @@ public class Switch extends CompoundButton { private Layout mOnLayout; private Layout mOffLayout; - @SuppressWarnings("hiding") private final Rect mTempRect = new Rect(); private static final int[] CHECKED_STATE_SET = { @@ -151,8 +150,6 @@ public class Switch extends CompoundButton { @Override public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { - int widthMode = MeasureSpec.getMode(widthMeasureSpec); - int widthSize = MeasureSpec.getSize(widthMeasureSpec); if (mOnLayout == null) { mOnLayout = makeLayout(mTextOn, mSwitchTextMaxWidth); } @@ -180,7 +177,7 @@ public class Switch extends CompoundButton { } } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) + @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) @Override public void onPopulateAccessibilityEvent(AccessibilityEvent event) { super.onPopulateAccessibilityEvent(event); @@ -464,7 +461,6 @@ public class Switch extends CompoundButton { return super.verifyDrawable(who) || who == mThumbDrawable || who == mTrackDrawable; } - @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) @Override public void jumpDrawablesToCurrentState() { 
super.jumpDrawablesToCurrentState(); @@ -472,14 +468,12 @@ public class Switch extends CompoundButton { mTrackDrawable.jumpToCurrentState(); } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) @Override public void onInitializeAccessibilityEvent(AccessibilityEvent event) { super.onInitializeAccessibilityEvent(event); event.setClassName(Switch.class.getName()); } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) @Override public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) { super.onInitializeAccessibilityNodeInfo(info); diff --git a/src/com/android/camera/util/ApiHelper.java b/src/com/android/camera/util/ApiHelper.java index 6cb303476..271957ee1 100644 --- a/src/com/android/camera/util/ApiHelper.java +++ b/src/com/android/camera/util/ApiHelper.java @@ -16,181 +16,33 @@ package com.android.camera.util; -import android.app.admin.DevicePolicyManager; -import android.content.ComponentName; -import android.hardware.Camera; import android.os.Build; -import android.provider.MediaStore.MediaColumns; -import android.view.View; -import android.view.WindowManager; import java.lang.reflect.Field; public class ApiHelper { - public static interface VERSION_CODES { - // These value are copied from Build.VERSION_CODES - public static final int GINGERBREAD_MR1 = 10; - public static final int HONEYCOMB = 11; - public static final int HONEYCOMB_MR1 = 12; - public static final int HONEYCOMB_MR2 = 13; - public static final int ICE_CREAM_SANDWICH = 14; - public static final int ICE_CREAM_SANDWICH_MR1 = 15; - public static final int JELLY_BEAN = 16; - public static final int JELLY_BEAN_MR1 = 17; - public static final int JELLY_BEAN_MR2 = 18; - } - public static final boolean AT_LEAST_16 = Build.VERSION.SDK_INT >= 16; - - public static final boolean USE_888_PIXEL_FORMAT = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN; - - public static final boolean ENABLE_PHOTO_EDITOR = - Build.VERSION.SDK_INT >= VERSION_CODES.ICE_CREAM_SANDWICH; - - public 
static final boolean HAS_VIEW_SYSTEM_UI_FLAG_LAYOUT_STABLE = - hasField(View.class, "SYSTEM_UI_FLAG_LAYOUT_STABLE"); - - public static final boolean HAS_VIEW_SYSTEM_UI_FLAG_HIDE_NAVIGATION = - hasField(View.class, "SYSTEM_UI_FLAG_HIDE_NAVIGATION"); - public static final boolean HAS_MEDIA_COLUMNS_WIDTH_AND_HEIGHT = - hasField(MediaColumns.class, "WIDTH"); - - public static final boolean HAS_REUSING_BITMAP_IN_BITMAP_REGION_DECODER = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN; - - public static final boolean HAS_REUSING_BITMAP_IN_BITMAP_FACTORY = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - - public static final boolean HAS_SET_BEAM_PUSH_URIS = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN; - - public static final boolean HAS_SET_DEFALT_BUFFER_SIZE = hasMethod( - "android.graphics.SurfaceTexture", "setDefaultBufferSize", - int.class, int.class); - - public static final boolean HAS_RELEASE_SURFACE_TEXTURE = hasMethod( - "android.graphics.SurfaceTexture", "release"); - - public static final boolean HAS_SURFACE_TEXTURE = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - - public static final boolean HAS_MTP = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB_MR1; - + Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN; public static final boolean HAS_AUTO_FOCUS_MOVE_CALLBACK = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN; - - public static final boolean HAS_REMOTE_VIEWS_SERVICE = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - - public static final boolean HAS_INTENT_EXTRA_LOCAL_ONLY = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - - public static final boolean HAS_SET_SYSTEM_UI_VISIBILITY = - hasMethod(View.class, "setSystemUiVisibility", int.class); - - public static final boolean HAS_FACE_DETECTION; - static { - boolean hasFaceDetection = false; - try { - Class<?> listenerClass = Class.forName( - "android.hardware.Camera$FaceDetectionListener"); - hasFaceDetection = - hasMethod(Camera.class, 
"setFaceDetectionListener", listenerClass) && - hasMethod(Camera.class, "startFaceDetection") && - hasMethod(Camera.class, "stopFaceDetection") && - hasMethod(Camera.Parameters.class, "getMaxNumDetectedFaces"); - } catch (Throwable t) { - } - HAS_FACE_DETECTION = hasFaceDetection; - } - - public static final boolean HAS_GET_CAMERA_DISABLED = - hasMethod(DevicePolicyManager.class, "getCameraDisabled", ComponentName.class); - + Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN; public static final boolean HAS_MEDIA_ACTION_SOUND = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN; - - public static final boolean HAS_TIME_LAPSE_RECORDING = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - - public static final boolean HAS_ZOOM_WHEN_RECORDING = - Build.VERSION.SDK_INT >= VERSION_CODES.ICE_CREAM_SANDWICH; - - public static final boolean HAS_CAMERA_FOCUS_AREA = - Build.VERSION.SDK_INT >= VERSION_CODES.ICE_CREAM_SANDWICH; - - public static final boolean HAS_CAMERA_METERING_AREA = - Build.VERSION.SDK_INT >= VERSION_CODES.ICE_CREAM_SANDWICH; - - public static final boolean HAS_MOTION_EVENT_TRANSFORM = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - - public static final boolean HAS_EFFECTS_RECORDING = false; - - // "Background" filter does not have "context" input port in jelly bean. 
- public static final boolean HAS_EFFECTS_RECORDING_CONTEXT_INPUT = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR1; - - public static final boolean HAS_GET_SUPPORTED_VIDEO_SIZE = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - - public static final boolean HAS_SET_ICON_ATTRIBUTE = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - - public static final boolean HAS_MEDIA_PROVIDER_FILES_TABLE = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - + Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN; public static final boolean HAS_SURFACE_TEXTURE_RECORDING = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN; - - public static final boolean HAS_ACTION_BAR = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - - // Ex: View.setTranslationX. - public static final boolean HAS_VIEW_TRANSFORM_PROPERTIES = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - + Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN; public static final boolean HAS_CAMERA_HDR = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR1; - - public static final boolean HAS_OPTIONS_IN_MUTABLE = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - - public static final boolean CAN_START_PREVIEW_IN_JPEG_CALLBACK = - Build.VERSION.SDK_INT >= VERSION_CODES.ICE_CREAM_SANDWICH; - - public static final boolean HAS_VIEW_PROPERTY_ANIMATOR = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB_MR1; - - public static final boolean HAS_POST_ON_ANIMATION = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN; - + Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1; public static final boolean HAS_ANNOUNCE_FOR_ACCESSIBILITY = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN; - - public static final boolean HAS_OBJECT_ANIMATION = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - - public static final boolean HAS_GLES20_REQUIRED = - Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB; - + Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN; public static final boolean 
HAS_ROTATION_ANIMATION = - hasField(WindowManager.LayoutParams.class, "rotationAnimation"); + Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2; public static final boolean HAS_ORIENTATION_LOCK = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2; - - public static final boolean HAS_CANCELLATION_SIGNAL = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN; - - public static final boolean HAS_MEDIA_MUXER = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2; - + Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR2; public static final boolean HAS_DISPLAY_LISTENER = - Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR1; + Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1; + + public static final boolean HAS_HIDEYBARS = isKitKatOrHigher(); public static int getIntFieldIfExists(Class<?> klass, String fieldName, Class<?> obj, int defaultVal) { @@ -202,33 +54,9 @@ public class ApiHelper { } } - private static boolean hasField(Class<?> klass, String fieldName) { - try { - klass.getDeclaredField(fieldName); - return true; - } catch (NoSuchFieldException e) { - return false; - } - } - - private static boolean hasMethod(String className, String methodName, - Class<?>... parameterTypes) { - try { - Class<?> klass = Class.forName(className); - klass.getDeclaredMethod(methodName, parameterTypes); - return true; - } catch (Throwable th) { - return false; - } - } - - private static boolean hasMethod( - Class<?> klass, String methodName, Class<?> ... paramTypes) { - try { - klass.getDeclaredMethod(methodName, paramTypes); - return true; - } catch (NoSuchMethodException e) { - return false; - } + public static boolean isKitKatOrHigher() { + // TODO: Remove CODENAME check as soon as VERSION_CODES.KITKAT is final. 
+ return Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT + || "KeyLimePie".equals(Build.VERSION.CODENAME); } } diff --git a/src/com/android/camera/util/CameraUtil.java b/src/com/android/camera/util/CameraUtil.java index adaaaa776..2ca32f0bb 100644 --- a/src/com/android/camera/util/CameraUtil.java +++ b/src/com/android/camera/util/CameraUtil.java @@ -16,16 +16,6 @@ package com.android.camera.util; -import java.io.Closeable; -import java.io.IOException; -import java.lang.reflect.Method; -import java.text.SimpleDateFormat; -import java.util.Date; -import java.util.List; -import java.util.Locale; -import java.util.StringTokenizer; - -import android.annotation.TargetApi; import android.app.Activity; import android.app.AlertDialog; import android.app.admin.DevicePolicyManager; @@ -47,15 +37,12 @@ import android.hardware.Camera.Parameters; import android.hardware.Camera.Size; import android.location.Location; import android.net.Uri; -import android.os.Build; import android.os.Handler; import android.os.ParcelFileDescriptor; import android.telephony.TelephonyManager; import android.util.DisplayMetrics; -import android.util.FloatMath; import android.util.Log; import android.util.TypedValue; -import android.view.Display; import android.view.OrientationEventListener; import android.view.Surface; import android.view.View; @@ -66,11 +53,19 @@ import android.widget.Toast; import com.android.camera.CameraActivity; import com.android.camera.CameraDisabledException; -import com.android.camera.CameraHardwareException; import com.android.camera.CameraHolder; import com.android.camera.CameraManager; import com.android.camera2.R; +import java.io.Closeable; +import java.io.IOException; +import java.lang.reflect.Method; +import java.text.SimpleDateFormat; +import java.util.Date; +import java.util.List; +import java.util.Locale; +import java.util.StringTokenizer; + /** * Collection of utility functions used in this package. 
*/ @@ -138,22 +133,14 @@ public class CameraUtil { return (supported != null) && supported.contains(SCENE_MODE_HDR); } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) public static boolean isMeteringAreaSupported(Parameters params) { - if (ApiHelper.HAS_CAMERA_METERING_AREA) { - return params.getMaxNumMeteringAreas() > 0; - } - return false; + return params.getMaxNumMeteringAreas() > 0; } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) public static boolean isFocusAreaSupported(Parameters params) { - if (ApiHelper.HAS_CAMERA_FOCUS_AREA) { - return (params.getMaxNumFocusAreas() > 0 - && isSupported(Parameters.FOCUS_MODE_AUTO, - params.getSupportedFocusModes())); - } - return false; + return (params.getMaxNumFocusAreas() > 0 + && isSupported(Parameters.FOCUS_MODE_AUTO, + params.getSupportedFocusModes())); } // Private intent extras. Test only. @@ -325,15 +312,12 @@ public class CameraUtil { } } - @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH) private static void throwIfCameraDisabled(Activity activity) throws CameraDisabledException { // Check if device policy has disabled the camera. 
- if (ApiHelper.HAS_GET_CAMERA_DISABLED) { - DevicePolicyManager dpm = (DevicePolicyManager) activity.getSystemService( - Context.DEVICE_POLICY_SERVICE); - if (dpm.getCameraDisabled(null)) { - throw new CameraDisabledException(); - } + DevicePolicyManager dpm = (DevicePolicyManager) activity.getSystemService( + Context.DEVICE_POLICY_SERVICE); + if (dpm.getCameraDisabled(null)) { + throw new CameraDisabledException(); } } @@ -395,7 +379,7 @@ public class CameraUtil { public static float distance(float x, float y, float sx, float sy) { float dx = x - sx; float dy = y - sy; - return FloatMath.sqrt(dx * dx + dy * dy); + return (float) Math.sqrt(dx * dx + dy * dy); } public static int clamp(int x, int min, int max) { @@ -452,15 +436,8 @@ public class CameraUtil { return orientationHistory; } - @SuppressWarnings("deprecation") - @TargetApi(Build.VERSION_CODES.HONEYCOMB_MR2) private static Point getDefaultDisplaySize(Activity activity, Point size) { - Display d = activity.getWindowManager().getDefaultDisplay(); - if (Build.VERSION.SDK_INT >= ApiHelper.VERSION_CODES.HONEYCOMB_MR2) { - d.getSize(size); - } else { - size.set(d.getWidth(), d.getHeight()); - } + activity.getWindowManager().getDefaultDisplay().getSize(size); return size; } diff --git a/src/com/android/camera/util/MotionEventHelper.java b/src/com/android/camera/util/MotionEventHelper.java index eabaeab3b..001f83c41 100644 --- a/src/com/android/camera/util/MotionEventHelper.java +++ b/src/com/android/camera/util/MotionEventHelper.java @@ -1,10 +1,7 @@ package com.android.camera.util; -import android.annotation.TargetApi; import android.graphics.Matrix; -import android.util.FloatMath; import android.view.MotionEvent; -import android.view.MotionEvent.PointerCoords; public final class MotionEventHelper { private MotionEventHelper() {} @@ -12,92 +9,12 @@ public final class MotionEventHelper { public static MotionEvent transformEvent(MotionEvent e, Matrix m) { // We try to use the new transform method if possible 
because it uses // less memory. - if (ApiHelper.HAS_MOTION_EVENT_TRANSFORM) { - return transformEventNew(e, m); - } else { - return transformEventOld(e, m); - } + return transformEventNew(e, m); } - @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) private static MotionEvent transformEventNew(MotionEvent e, Matrix m) { MotionEvent newEvent = MotionEvent.obtain(e); newEvent.transform(m); return newEvent; } - - // This is copied from Input.cpp in the android framework. - private static MotionEvent transformEventOld(MotionEvent e, Matrix m) { - long downTime = e.getDownTime(); - long eventTime = e.getEventTime(); - int action = e.getAction(); - int pointerCount = e.getPointerCount(); - int[] pointerIds = getPointerIds(e); - PointerCoords[] pointerCoords = getPointerCoords(e); - int metaState = e.getMetaState(); - float xPrecision = e.getXPrecision(); - float yPrecision = e.getYPrecision(); - int deviceId = e.getDeviceId(); - int edgeFlags = e.getEdgeFlags(); - int source = e.getSource(); - int flags = e.getFlags(); - - // Copy the x and y coordinates into an array, map them, and copy back. 
- float[] xy = new float[pointerCoords.length * 2]; - for (int i = 0; i < pointerCount;i++) { - xy[2 * i] = pointerCoords[i].x; - xy[2 * i + 1] = pointerCoords[i].y; - } - m.mapPoints(xy); - for (int i = 0; i < pointerCount;i++) { - pointerCoords[i].x = xy[2 * i]; - pointerCoords[i].y = xy[2 * i + 1]; - pointerCoords[i].orientation = transformAngle( - m, pointerCoords[i].orientation); - } - - MotionEvent n = MotionEvent.obtain(downTime, eventTime, action, - pointerCount, pointerIds, pointerCoords, metaState, xPrecision, - yPrecision, deviceId, edgeFlags, source, flags); - - return n; - } - - private static int[] getPointerIds(MotionEvent e) { - int n = e.getPointerCount(); - int[] r = new int[n]; - for (int i = 0; i < n; i++) { - r[i] = e.getPointerId(i); - } - return r; - } - - private static PointerCoords[] getPointerCoords(MotionEvent e) { - int n = e.getPointerCount(); - PointerCoords[] r = new PointerCoords[n]; - for (int i = 0; i < n; i++) { - r[i] = new PointerCoords(); - e.getPointerCoords(i, r[i]); - } - return r; - } - - private static float transformAngle(Matrix m, float angleRadians) { - // Construct and transform a vector oriented at the specified clockwise - // angle from vertical. Coordinate system: down is increasing Y, right is - // increasing X. - float[] v = new float[2]; - v[0] = FloatMath.sin(angleRadians); - v[1] = -FloatMath.cos(angleRadians); - m.mapVectors(v); - - // Derive the transformed vector's clockwise angle from vertical. - float result = (float) Math.atan2(v[0], -v[1]); - if (result < -Math.PI / 2) { - result += Math.PI; - } else if (result > Math.PI / 2) { - result -= Math.PI; - } - return result; - } } |