author     Eino-Ville Talvala <etalvala@google.com>    2011-08-19 18:14:06 -0700
committer  Eino-Ville Talvala <etalvala@google.com>    2011-09-06 13:32:46 -0700
commit     9efb0b19fa929c528253f0a9b6f5b27791e1ee52 (patch)
tree       74e44529a5682ea73905cff667a65eacb51c745e
parent     fc404e9a87e2f7e1fa9bc0ede998ed9cf75a047d (diff)
Add video recording with effects into Camera.
Initial support for video recording in the Camera app, with the goofyface
and background replacement effects.

Bug: 5212166
Change-Id: I30995e305c202ba57bf76ff18c885bf16c5ea14f
-rw-r--r--  Android.mk                                    |   2
-rw-r--r--  AndroidManifest.xml                           |   1
-rw-r--r--  res/layout-w1024dp/preview_frame_video.xml    |  10
-rw-r--r--  res/layout/preview_frame_video.xml            |  10
-rw-r--r--  res/raw/backdropper.graph                     |  83
-rw-r--r--  res/raw/goofy_face.graph                      | 100
-rw-r--r--  res/values/arrays.xml                         |  20
-rw-r--r--  res/values/strings.xml                        |  10
-rw-r--r--  src/com/android/camera/CameraSettings.java    |  71
-rw-r--r--  src/com/android/camera/EffectsRecorder.java   | 565
-rw-r--r--  src/com/android/camera/VideoCamera.java       | 305
11 files changed, 1137 insertions, 40 deletions
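
For orientation, here is a minimal usage sketch of the EffectsRecorder API this patch introduces (modeled after MediaRecorder, as its class comment says). Method and constant names are taken from the new class below; the Camera, CamcorderProfile, SurfaceHolder, preview dimensions and output path are placeholder inputs supplied by the caller, error handling is omitted, and this is a fragment rather than a complete Activity:

    // Sketch only: wire up the recorder, preview with an effect, then record.
    Camera camera = Camera.open();                                   // caller-managed camera
    CamcorderProfile profile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);

    EffectsRecorder effects = new EffectsRecorder(context);
    effects.setCamera(camera);
    effects.setProfile(profile);
    effects.setPreviewDisplay(surfaceHolder, previewWidth, previewHeight);
    effects.setEffect(EffectsRecorder.EFFECT_GOOFY_FACE,
                      EffectsRecorder.EFFECT_GF_SQUEEZE);            // Integer parameter for goofy face
    effects.setOutputFile("/sdcard/DCIM/effects_clip.mp4");          // placeholder path
    effects.startPreview();                                          // builds and runs the filter graph

    effects.startRecording();                                        // begin encoding to the output file
    // ... record ...
    effects.stopRecording();                                         // back to effect preview
    effects.stopPreview();                                           // tear the graph down
    effects.release();                                               // final cleanup when done

Note that the goofy-face effect takes an Integer parameter (one of the EFFECT_GF_* constants), while the backdropper effect instead takes a String URI for the background video, as CameraSettings.readEffectParameter() below shows.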
diff --git a/Android.mk b/Android.mk
index 43db9772..616047c8 100644
--- a/Android.mk
+++ b/Android.mk
@@ -6,7 +6,7 @@ LOCAL_MODULE_TAGS := optional
LOCAL_SRC_FILES := $(call all-java-files-under, src)
LOCAL_PACKAGE_NAME := Camera
-LOCAL_SDK_VERSION := current
+#LOCAL_SDK_VERSION := current
LOCAL_JNI_SHARED_LIBRARIES := libjni_mosaic
diff --git a/AndroidManifest.xml b/AndroidManifest.xml
index 53fb0468..61088eca 100644
--- a/AndroidManifest.xml
+++ b/AndroidManifest.xml
@@ -23,6 +23,7 @@
android:taskAffinity=""
android:theme="@style/ThemeCamera"
android:hardwareAccelerated="true">
+ <uses-library android:name="com.google.android.media.effects" android:required="false" />
<receiver android:name="com.android.camera.CameraButtonIntentReceiver">
<intent-filter>
<action android:name="android.intent.action.CAMERA_BUTTON"/>
diff --git a/res/layout-w1024dp/preview_frame_video.xml b/res/layout-w1024dp/preview_frame_video.xml
index 8760ff06..98d9ce28 100644
--- a/res/layout-w1024dp/preview_frame_video.xml
+++ b/res/layout-w1024dp/preview_frame_video.xml
@@ -41,6 +41,16 @@
android:drawablePadding="5dp"
android:visibility="gone"/>
</LinearLayout>
+ <TextView android:id="@+id/bg_replace_message"
+ android:layout_width="300dp"
+ android:layout_height="wrap_content"
+ android:layout_centerInParent="true"
+ android:padding="32dp"
+ android:layout_margin="24dp"
+ android:textAppearance="?android:attr/textAppearanceMedium"
+ android:visibility="gone"
+ android:background="@drawable/bg_text_on_preview"
+ android:text="@string/bg_replacement_message" />
<ImageButton android:id="@+id/btn_play"
style="@style/ReviewControlIcon"
android:layout_centerInParent="true"
diff --git a/res/layout/preview_frame_video.xml b/res/layout/preview_frame_video.xml
index baabe0af..3dc8299e 100644
--- a/res/layout/preview_frame_video.xml
+++ b/res/layout/preview_frame_video.xml
@@ -57,5 +57,15 @@
android:visibility="gone"
android:background="@android:color/black"/>
<include layout="@layout/indicator_bar"/>
+ <TextView android:id="@+id/bg_replace_message"
+ android:layout_width="300dp"
+ android:layout_height="wrap_content"
+ android:layout_centerInParent="true"
+ android:padding="32dp"
+ android:layout_margin="24dp"
+ android:textAppearance="?android:attr/textAppearanceMedium"
+ android:background="@drawable/bg_text_on_preview"
+ android:visibility="gone"
+ android:text="@string/bg_replacement_message" />
</RelativeLayout>
</com.android.camera.PreviewFrameLayout>
diff --git a/res/raw/backdropper.graph b/res/raw/backdropper.graph
new file mode 100644
index 00000000..e3fe877c
--- /dev/null
+++ b/res/raw/backdropper.graph
@@ -0,0 +1,83 @@
+//
+// Copyright (C) 2011 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Imports ---------------------------------------------------
+@import android.filterpacks.base;
+@import android.filterpacks.ui;
+@import android.filterpacks.videosrc;
+@import android.filterpacks.videoproc;
+@import android.filterpacks.videosink;
+
+@setting autoBranch = "synced";
+
+// Externals -------------------------------------------------
+
+@external textureSourceCallback;
+@external recordingWidth;
+@external recordingHeight;
+@external recordingProfile;
+@external audioSource;
+
+@external previewSurface;
+@external previewWidth;
+@external previewHeight;
+
+@external learningDoneListener;
+
+// Filters ---------------------------------------------------
+
+// Camera input
+@filter SurfaceTextureSource source {
+ sourceListener = $textureSourceCallback;
+ width = $recordingWidth;
+ height = $recordingHeight;
+ closeOnTimeout = true;
+}
+
+// Background video input
+@filter MediaSource background {
+ sourceUrl = "no_file_specified";
+ waitForNewFrame = false;
+ sourceIsUrl = true;
+}
+
+// Background replacer
+@filter BackDropperFilter replacer {
+ autowbToggle = 1;
+ learningDoneListener = $learningDoneListener;
+}
+
+// Display output
+@filter SurfaceTargetFilter display {
+ surface = $previewSurface;
+ owidth = $previewWidth;
+ oheight = $previewHeight;
+}
+
+// Recording output
+@filter MediaEncoderFilter recorder {
+ audioSource = $audioSource;
+ recordingProfile = $recordingProfile;
+ recording = false;
+ // outputFile, orientationHint, listeners will be set when recording starts
+}
+
+// Connections -----------------------------------------------
+@connect source[video] => replacer[video];
+@connect background[video] => replacer[background];
+@connect replacer[video] => display[frame];
+@connect replacer[video] => recorder[videoframe];
+
diff --git a/res/raw/goofy_face.graph b/res/raw/goofy_face.graph
new file mode 100644
index 00000000..344e1375
--- /dev/null
+++ b/res/raw/goofy_face.graph
@@ -0,0 +1,100 @@
+//
+// Copyright (C) 2011 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+// Imports ---------------------------------------------------
+@import android.filterpacks.videosrc;
+@import android.filterpacks.videosink;
+@import android.filterpacks.ui;
+@import android.filterpacks.base;
+@import android.filterpacks.imageproc;
+
+@import com.google.android.filterpacks.facedetect;
+
+@setting autoBranch = "synced";
+
+// Externals -------------------------------------------------
+
+@external textureSourceCallback;
+@external recordingWidth;
+@external recordingHeight;
+@external recordingProfile;
+@external audioSource;
+
+@external previewSurface;
+@external previewWidth;
+@external previewHeight;
+
+// Filters ---------------------------------------------------
+
+// Camera input
+@filter SurfaceTextureSource source {
+ sourceListener = $textureSourceCallback;
+ width = $recordingWidth;
+ height = $recordingHeight;
+ closeOnTimeout = true;
+}
+
+// Face detection
+@filter ToPackedGrayFilter toPackedGray {
+ owidth = 320;
+ oheight = 240;
+}
+
+@filter MultiFaceTrackerFilter faceTracker {
+ numChannelsDetector = 3;
+ quality = 0.0f;
+ smoothness = 0.2f;
+ minEyeDist = 25.0f;
+ rollRange = 45.0f;
+ numSkipFrames = 9;
+ trackingError = 1.0;
+ mouthOnlySmoothing = 0;
+ useAffineCorrection = 1;
+ patchSize = 15;
+}
+
+// Goofyface
+@filter GoofyRenderFilter goofyrenderer {
+ distortionAmount = 1.0;
+}
+
+// Display output
+@filter SurfaceTargetFilter display {
+ surface = $previewSurface;
+ owidth = $previewWidth;
+ oheight = $previewHeight;
+}
+
+// Recording output
+@filter MediaEncoderFilter recorder {
+ audioSource = $audioSource;
+ recordingProfile = $recordingProfile;
+ recording = false;
+ // outputFile, orientationHint, listeners will be set when recording starts
+}
+
+// Connections -----------------------------------------------
+// camera -> faceTracker
+@connect source[video] => toPackedGray[image];
+@connect toPackedGray[image] => faceTracker[image];
+// camera -> goofy
+@connect source[video] => goofyrenderer[image];
+// faceTracker -> goofy
+@connect faceTracker[faces] => goofyrenderer[faces];
+// goofy -> display out
+@connect goofyrenderer[outimage] => display[frame];
+// goofy -> record
+@connect goofyrenderer[outimage] => recorder[videoframe];
diff --git a/res/values/arrays.xml b/res/values/arrays.xml
index 753b629f..4006a7e0 100644
--- a/res/values/arrays.xml
+++ b/res/values/arrays.xml
@@ -249,15 +249,21 @@
<item>@string/effect_goofy_face_small_mouth</item>
<item>@string/effect_goofy_face_big_nose</item>
<item>@string/effect_goofy_face_small_eyes</item>
+ <item>@string/effect_backdropper_space</item>
+ <item>@string/effect_backdropper_beach</item>
+ <item>@string/effect_backdropper_gallery</item>
</string-array>
<string-array name="pref_video_effect_entryvalues" translatable="false">
- <item>-1</item>
- <item>0</item>
- <item>1</item>
- <item>2</item>
- <item>3</item>
- <item>4</item>
- <item>5</item>
+ <item>none</item>
+ <item>goofy_face/squeeze</item>
+ <item>goofy_face/big_eyes</item>
+ <item>goofy_face/big_mouth</item>
+ <item>goofy_face/small_mouth</item>
+ <item>goofy_face/big_nose</item>
+ <item>goofy_face/small_eyes</item>
+ <item>backdropper/file:///system/media/video/AndroidinSpace15s100.mp4</item>
+ <item>backdropper/file:///system/media/video/beach_100k.mp4</item>
+ <item>backdropper/gallery</item>
</string-array>
</resources>
diff --git a/res/values/strings.xml b/res/values/strings.xml
index a385a970..617c1825 100644
--- a/res/values/strings.xml
+++ b/res/values/strings.xml
@@ -309,4 +309,14 @@
<string name="effect_goofy_face_big_nose">Big Nose</string>
<!-- Effect setting item that makes eyes small. [CHAR LIMIT=14] -->
<string name="effect_goofy_face_small_eyes">Small Eyes</string>
+ <!-- Effect setting item that replaces background with space. [CHAR LIMIT=14] -->
+ <string name="effect_backdropper_space">In Space</string>
+ <!-- Effect setting item that replaces background with the beach. [CHAR LIMIT=14] -->
+ <string name="effect_backdropper_beach">On the Beach</string>
+ <!-- Effect setting item that replaces background with video from gallery. [CHAR LIMIT=14] -->
+ <string name="effect_backdropper_gallery">Pick your own</string>
+
+ <!-- Message displayed in overlay during background replacement training [CHAR LIMIT=180]-->
+ <string name="bg_replacement_message">Place your device on a steady surface and be sure there\'s no movement behind you.\n\nThen step out of the camera\'s view.</string>
+
</resources>
diff --git a/src/com/android/camera/CameraSettings.java b/src/com/android/camera/CameraSettings.java
index 27dfac1c..a465a19c 100644
--- a/src/com/android/camera/CameraSettings.java
+++ b/src/com/android/camera/CameraSettings.java
@@ -180,7 +180,10 @@ public class CameraSettings {
if (cameraIdPref != null) buildCameraId(group, cameraIdPref);
if (timeLapseInterval != null) resetIfInvalid(timeLapseInterval);
- if (videoEffect != null) resetIfInvalid(videoEffect);
+ if (videoEffect != null) {
+ initVideoEffect(group, videoEffect);
+ resetIfInvalid(videoEffect);
+ }
}
private void buildExposureCompensation(
@@ -379,6 +382,52 @@ public class CameraSettings {
return 0;
}
+ public static int readEffectType(SharedPreferences pref) {
+ String effectSelection = pref.getString(KEY_VIDEO_EFFECT, "none");
+ if (effectSelection.equals("none")) {
+ return EffectsRecorder.EFFECT_NONE;
+ } else if (effectSelection.startsWith("goofy_face")) {
+ return EffectsRecorder.EFFECT_GOOFY_FACE;
+ } else if (effectSelection.startsWith("backdropper")) {
+ return EffectsRecorder.EFFECT_BACKDROPPER;
+ }
+ Log.e(TAG, "Invalid effect selection: " + effectSelection);
+ return EffectsRecorder.EFFECT_NONE;
+ }
+
+ public static Object readEffectParameter(SharedPreferences pref) {
+ String effectSelection = pref.getString(KEY_VIDEO_EFFECT, "none");
+ if (effectSelection.equals("none")) {
+ return null;
+ }
+ int separatorIndex = effectSelection.indexOf('/');
+ String effectParameter =
+ effectSelection.substring(separatorIndex + 1);
+ if (effectSelection.startsWith("goofy_face")) {
+ if (effectParameter.equals("squeeze")) {
+ return EffectsRecorder.EFFECT_GF_SQUEEZE;
+ } else if (effectParameter.equals("big_eyes")) {
+ return EffectsRecorder.EFFECT_GF_BIG_EYES;
+ } else if (effectParameter.equals("big_mouth")) {
+ return EffectsRecorder.EFFECT_GF_BIG_MOUTH;
+ } else if (effectParameter.equals("small_mouth")) {
+ return EffectsRecorder.EFFECT_GF_SMALL_MOUTH;
+ } else if (effectParameter.equals("big_nose")) {
+ return EffectsRecorder.EFFECT_GF_BIG_NOSE;
+ } else if (effectParameter.equals("small_eyes")) {
+ return EffectsRecorder.EFFECT_GF_SMALL_EYES;
+ }
+ } else if (effectSelection.startsWith("backdropper")) {
+ // Parameter is a string that either encodes the URI to use,
+ // or specifies 'gallery'.
+ return effectParameter;
+ }
+
+ Log.e(TAG, "Invalid effect selection: " + effectSelection);
+ return null;
+ }
+
+
public static void restorePreferences(Context context,
ComboPreferences preferences, Parameters parameters) {
int currentCameraId = readPreferredCameraId(preferences);
@@ -440,4 +489,24 @@ public class CameraSettings {
videoQuality.filterUnsupported(supported);
}
}
+
+ private void initVideoEffect(PreferenceGroup group, ListPreference videoEffect) {
+ CharSequence[] values = videoEffect.getEntryValues();
+
+ boolean goofyFaceSupported = EffectsRecorder.isEffectSupported(EffectsRecorder.EFFECT_GOOFY_FACE);
+ boolean backdropperSupported =
+ EffectsRecorder.isEffectSupported(EffectsRecorder.EFFECT_BACKDROPPER) &&
+ mParameters.isAutoExposureLockSupported() &&
+ mParameters.isAutoWhiteBalanceLockSupported();
+
+ ArrayList<String> supported = new ArrayList<String>();
+ for (CharSequence value : values) {
+ String effectSelection = value.toString();
+ if (!goofyFaceSupported && effectSelection.startsWith("goofy_face")) continue;
+ if (!backdropperSupported && effectSelection.startsWith("backdropper")) continue;
+ supported.add(effectSelection);
+ }
+
+ filterUnsupportedOptions(group, videoEffect, supported);
+ }
}
diff --git a/src/com/android/camera/EffectsRecorder.java b/src/com/android/camera/EffectsRecorder.java
new file mode 100644
index 00000000..d17a92f6
--- /dev/null
+++ b/src/com/android/camera/EffectsRecorder.java
@@ -0,0 +1,565 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy of
+ * the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+
+package com.android.camera;
+
+import android.content.Context;
+import android.filterfw.GraphEnvironment;
+import android.filterfw.core.Filter;
+import android.filterfw.core.GraphRunner;
+import android.filterfw.core.GraphRunner.OnRunnerDoneListener;
+import android.filterpacks.videosrc.SurfaceTextureSource.SurfaceTextureSourceListener;
+import android.filterpacks.videoproc.BackDropperFilter;
+import android.filterpacks.videoproc.BackDropperFilter.LearningDoneListener;
+
+import android.graphics.SurfaceTexture;
+import android.hardware.Camera;
+import android.media.MediaRecorder;
+import android.media.CamcorderProfile;
+import android.os.ConditionVariable;
+import android.os.Handler;
+import android.os.Looper;
+import android.os.ParcelFileDescriptor;
+import android.util.Log;
+import android.view.Surface;
+import android.view.SurfaceHolder;
+
+import java.io.IOException;
+import java.lang.Runnable;
+
+/**
+ * Encapsulates the mobile filter framework components needed to record video with
+ * effects applied. Modeled after MediaRecorder.
+ */
+public class EffectsRecorder {
+
+ public static final int EFFECT_NONE = 0;
+ public static final int EFFECT_GOOFY_FACE = 1;
+ public static final int EFFECT_BACKDROPPER = 2;
+
+ public static final int EFFECT_GF_SQUEEZE = 0;
+ public static final int EFFECT_GF_BIG_EYES = 1;
+ public static final int EFFECT_GF_BIG_MOUTH = 2;
+ public static final int EFFECT_GF_SMALL_MOUTH = 3;
+ public static final int EFFECT_GF_BIG_NOSE = 4;
+ public static final int EFFECT_GF_SMALL_EYES = 5;
+
+ public static final int EFFECT_MSG_STARTED_LEARNING = 0;
+ public static final int EFFECT_MSG_DONE_LEARNING = 1;
+ public static final int EFFECT_MSG_STOPPING_EFFECT = 2;
+
+ private Context mContext;
+ private Handler mHandler;
+ private boolean mReleased;
+
+ private Camera mCameraDevice;
+ private CamcorderProfile mProfile;
+ private SurfaceHolder mPreviewSurfaceHolder;
+ private int mPreviewWidth;
+ private int mPreviewHeight;
+ private MediaRecorder.OnInfoListener mInfoListener;
+ private MediaRecorder.OnErrorListener mErrorListener;
+
+ private String mOutputFile;
+ private int mOrientationHint = 0;
+
+ private int mEffect = EFFECT_NONE;
+ private int mCurrentEffect = EFFECT_NONE;
+ private EffectsListener mEffectsListener;
+
+ private Object mEffectParameter;
+
+ private GraphEnvironment mGraphEnv;
+ private int mGraphId;
+ private GraphRunner mRunner;
+
+ private SurfaceTexture mTextureSource;
+
+ private static final int STATE_CONFIGURE = 0;
+ private static final int STATE_WAITING_FOR_SURFACE = 1;
+ private static final int STATE_PREVIEW = 2;
+ private static final int STATE_RECORD = 3;
+ private static final int STATE_RELEASED = 4;
+ private int mState = STATE_CONFIGURE;
+
+ private boolean mLogVerbose = Log.isLoggable(TAG, Log.VERBOSE);
+ private static final String TAG = "effectsrecorder";
+
+ /** Determine if a given effect is supported at runtime
+ * Some effects require libraries not available on all devices
+ */
+ public static boolean isEffectSupported(int effectId) {
+ switch (effectId) {
+ case EFFECT_GOOFY_FACE:
+ return Filter.isAvailable("com.google.android.filterpacks.facedetect.GoofyRenderFilter");
+ case EFFECT_BACKDROPPER:
+ return Filter.isAvailable("android.filterpacks.videoproc.BackDropperFilter");
+ default:
+ return false;
+ }
+ }
+
+ public EffectsRecorder(Context context) {
+ if (mLogVerbose) Log.v(TAG, "EffectsRecorder created (" + this + ")");
+ mContext = context;
+ mHandler = new Handler(Looper.getMainLooper());
+ }
+
+ public void setCamera(Camera cameraDevice) {
+ switch (mState) {
+ case STATE_PREVIEW:
+ throw new RuntimeException("setCamera cannot be called while previewing!");
+ case STATE_RECORD:
+ throw new RuntimeException("setCamera cannot be called while recording!");
+ case STATE_RELEASED:
+ throw new RuntimeException("setCamera called on an already released recorder!");
+ default:
+ break;
+ }
+
+ mCameraDevice = cameraDevice;
+ }
+
+ public void setProfile(CamcorderProfile profile) {
+ switch (mState) {
+ case STATE_RECORD:
+ throw new RuntimeException("setProfile cannot be called while recording!");
+ case STATE_RELEASED:
+ throw new RuntimeException("setProfile called on an already released recorder!");
+ default:
+ break;
+ }
+ mProfile = profile;
+ }
+
+ public void setOutputFile(String outputFile) {
+ switch (mState) {
+ case STATE_RECORD:
+ throw new RuntimeException("setOutputFile cannot be called while recording!");
+ case STATE_RELEASED:
+ throw new RuntimeException("setOutputFile called on an already released recorder!");
+ default:
+ break;
+ }
+
+ mOutputFile = outputFile;
+ }
+
+ public void setPreviewDisplay(SurfaceHolder previewSurfaceHolder,
+ int previewWidth,
+ int previewHeight) {
+ if (mLogVerbose) Log.v(TAG, "setPreviewDisplay (" + this + ")");
+ switch (mState) {
+ case STATE_RECORD:
+ throw new RuntimeException("setPreviewDisplay cannot be called while recording!");
+ case STATE_RELEASED:
+ throw new RuntimeException("setPreviewDisplay called on an already released recorder!");
+ default:
+ break;
+ }
+
+ mPreviewSurfaceHolder = previewSurfaceHolder;
+ mPreviewWidth = previewWidth;
+ mPreviewHeight = previewHeight;
+
+ switch (mState) {
+ case STATE_WAITING_FOR_SURFACE:
+ startPreview();
+ break;
+ case STATE_PREVIEW:
+ initializeEffect(true);
+ break;
+ }
+ }
+
+ public void setEffect(int effect, Object effectParameter) {
+ if (mLogVerbose) Log.v(TAG,
+ "Setting effect ID to " + effect +
+ ", parameter to " + effectParameter.toString() );
+ switch (mState) {
+ case STATE_RECORD:
+ throw new RuntimeException("setEffect cannot be called while recording!");
+ case STATE_RELEASED:
+ throw new RuntimeException("setEffect called on an already released recorder!");
+ default:
+ break;
+ }
+
+ mEffect = effect;
+ mEffectParameter = effectParameter;
+
+ if (mState == STATE_PREVIEW) {
+ initializeEffect(false);
+ }
+ }
+
+ public interface EffectsListener {
+ public void onEffectsUpdate(int effectId, int effectMsg);
+ }
+
+ public void setEffectsListener(EffectsListener listener) {
+ mEffectsListener = listener;
+ }
+
+ public void setOrientationHint(int degrees) {
+ switch (mState) {
+ case STATE_RECORD:
+ throw new RuntimeException("setOrientationHint cannot be called while recording!");
+ case STATE_RELEASED:
+ throw new RuntimeException("setOrientationHint called on an already released recorder!");
+ default:
+ break;
+ }
+
+ mOrientationHint = degrees;
+ }
+
+ public void setOnInfoListener(MediaRecorder.OnInfoListener infoListener) {
+ switch (mState) {
+ case STATE_RECORD:
+ throw new RuntimeException("setInfoListener cannot be called while recording!");
+ case STATE_RELEASED:
+ throw new RuntimeException("setInfoListener called on an already released recorder!");
+ default:
+ break;
+ }
+ mInfoListener = infoListener;
+ }
+
+ public void setOnErrorListener(MediaRecorder.OnErrorListener errorListener) {
+ switch (mState) {
+ case STATE_RECORD:
+ throw new RuntimeException("setErrorListener cannot be called while recording!");
+ case STATE_RELEASED:
+ throw new RuntimeException("setErrorListener called on an already released recorder!");
+ default:
+ break;
+ }
+ mErrorListener = errorListener;
+ }
+
+ public void initializeFilterFramework() {
+ mGraphEnv = new GraphEnvironment();
+ mGraphEnv.createGLEnvironment();
+
+ mGraphEnv.addReferences(
+ "textureSourceCallback", mSourceReadyCallback,
+ "recordingWidth", mProfile.videoFrameWidth,
+ "recordingHeight", mProfile.videoFrameHeight,
+ "recordingProfile", mProfile,
+ "audioSource", MediaRecorder.AudioSource.CAMCORDER,
+ "learningDoneListener", mLearningDoneListener);
+
+ mRunner = null;
+ mGraphId = -1;
+ mCurrentEffect = EFFECT_NONE;
+ }
+
+ public synchronized void initializeEffect(boolean forceReset) {
+ if (forceReset ||
+ mCurrentEffect != mEffect ||
+ mCurrentEffect == EFFECT_BACKDROPPER) {
+ mGraphEnv.addReferences(
+ "previewSurface", mPreviewSurfaceHolder.getSurface(),
+ "previewWidth", mPreviewWidth,
+ "previewHeight", mPreviewHeight);
+
+ if (mState == STATE_PREVIEW) {
+ // Switching effects while running. Stop existing runner.
+ // The stop callback will take care of starting new runner.
+ sendMessage(mCurrentEffect, EFFECT_MSG_STOPPING_EFFECT);
+ mRunner.stop();
+ }
+ switch (mEffect) {
+ case EFFECT_GOOFY_FACE:
+ mGraphId = mGraphEnv.loadGraph(mContext, R.raw.goofy_face);
+ break;
+ case EFFECT_BACKDROPPER:
+ sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_STARTED_LEARNING);
+ mGraphId = mGraphEnv.loadGraph(mContext, R.raw.backdropper);
+ break;
+ default:
+ throw new RuntimeException("Unknown effect ID " + mEffect + "!");
+ }
+ mCurrentEffect = mEffect;
+
+ mRunner = mGraphEnv.getRunner(mGraphId, GraphEnvironment.MODE_ASYNCHRONOUS);
+ mRunner.setDoneCallback(mRunnerDoneCallback);
+ }
+
+ switch (mCurrentEffect) {
+ case EFFECT_GOOFY_FACE:
+ Filter goofyFilter = mRunner.getGraph().getFilter("goofyrenderer");
+ goofyFilter.setInputValue("currentEffect",
+ ((Integer)mEffectParameter).intValue());
+ break;
+ case EFFECT_BACKDROPPER:
+ Filter backgroundSrc = mRunner.getGraph().getFilter("background");
+ backgroundSrc.setInputValue("sourceUrl",
+ (String)mEffectParameter);
+ break;
+ default:
+ break;
+ }
+
+ }
+
+ public void startPreview() {
+ if (mLogVerbose) Log.v(TAG, "Starting preview (" + this + ")");
+
+ switch (mState) {
+ case STATE_PREVIEW:
+ // Already running preview
+ Log.w(TAG, "startPreview called when already running preview");
+ return;
+ case STATE_RECORD:
+ throw new RuntimeException("Cannot start preview when already recording!");
+ case STATE_RELEASED:
+ throw new RuntimeException("startPreview called on an already released recorder!");
+ default:
+ break;
+ }
+
+ if (mEffect == EFFECT_NONE) {
+ throw new RuntimeException("No effect selected!");
+ }
+ if (mEffectParameter == null) {
+ throw new RuntimeException("No effect parameter provided!");
+ }
+ if (mProfile == null) {
+ throw new RuntimeException("No recording profile provided!");
+ }
+ if (mPreviewSurfaceHolder == null) {
+ if (mLogVerbose) Log.v(TAG, "Passed a null surface holder; waiting for valid one");
+ mState = STATE_WAITING_FOR_SURFACE;
+ return;
+ }
+ if (mCameraDevice == null) {
+ throw new RuntimeException("No camera to record from!");
+ }
+
+ if (mLogVerbose) Log.v(TAG, "Initializing filter graph");
+
+ initializeFilterFramework();
+
+ initializeEffect(true);
+
+ if (mLogVerbose) Log.v(TAG, "Starting filter graph");
+
+ mRunner.run();
+ // Rest of preview startup handled in mSourceReadyCallback
+ }
+
+ private SurfaceTextureSourceListener mSourceReadyCallback =
+ new SurfaceTextureSourceListener() {
+ public void onSurfaceTextureSourceReady(SurfaceTexture source) {
+ if (mLogVerbose) Log.v(TAG, "SurfaceTexture ready callback received");
+ synchronized(EffectsRecorder.this) {
+ mTextureSource = source;
+
+ // When shutting down a graph, we receive a null SurfaceTexture to
+ // indicate that. Don't want to connect up the camera in that case.
+ if (source == null) return;
+
+ if (mState == STATE_RELEASED) return;
+
+ mCameraDevice.stopPreview();
+ if (mLogVerbose) Log.v(TAG, "Runner active, connecting effects preview");
+ try {
+ mCameraDevice.setPreviewTexture(mTextureSource);
+ } catch(IOException e) {
+ throw new RuntimeException("Unable to connect camera to effect input", e);
+ }
+
+ // Lock AE/AWB to reduce transition flicker
+ tryEnable3ALocks(true);
+
+ mCameraDevice.startPreview();
+
+ // Unlock AE/AWB after preview started
+ tryEnable3ALocks(false);
+
+ mState = STATE_PREVIEW;
+
+ if (mLogVerbose) Log.v(TAG, "Start preview/effect switch complete");
+ }
+ }
+ };
+
+ private LearningDoneListener mLearningDoneListener =
+ new LearningDoneListener() {
+ public void onLearningDone(BackDropperFilter filter) {
+ if (mLogVerbose) Log.v(TAG, "Learning done callback triggered");
+ // Called in a processing thread, so have to post message back to UI
+ // thread
+ sendMessage(EFFECT_BACKDROPPER, EFFECT_MSG_DONE_LEARNING);
+ enable3ALocks(true);
+ }
+ };
+
+ public void startRecording() {
+ if (mLogVerbose) Log.v(TAG, "Starting recording (" + this + ")");
+
+ switch (mState) {
+ case STATE_RECORD:
+ throw new RuntimeException("Already recording, cannot begin anew!");
+ case STATE_RELEASED:
+ throw new RuntimeException("startRecording called on an already released recorder!");
+ default:
+ break;
+ }
+
+ if (mOutputFile == null) {
+ throw new RuntimeException("No output file name provided!");
+ }
+
+ if (mState == STATE_CONFIGURE) {
+ startPreview();
+ }
+ Filter recorder = mRunner.getGraph().getFilter("recorder");
+ recorder.setInputValue("outputFile", mOutputFile);
+ recorder.setInputValue("orientationHint", mOrientationHint);
+ if (mInfoListener != null) {
+ recorder.setInputValue("infoListener", mInfoListener);
+ }
+ if (mErrorListener != null) {
+ recorder.setInputValue("errorListener", mErrorListener);
+ }
+ recorder.setInputValue("recording", true);
+ mState = STATE_RECORD;
+ }
+
+ public void stopRecording() {
+ if (mLogVerbose) Log.v(TAG, "Stop recording (" + this + ")");
+
+ switch (mState) {
+ case STATE_CONFIGURE:
+ case STATE_PREVIEW:
+ Log.w(TAG, "StopRecording called when recording not active!");
+ return;
+ case STATE_RELEASED:
+ throw new RuntimeException("stopRecording called on released EffectsRecorder!");
+ default:
+ break;
+ }
+ Filter recorder = mRunner.getGraph().getFilter("recorder");
+ recorder.setInputValue("recording", false);
+ mState = STATE_PREVIEW;
+ }
+
+ // Stop and release effect resources
+ public void stopPreview() {
+ if (mLogVerbose) Log.v(TAG, "Stopping preview (" + this + ")");
+
+ switch (mState) {
+ case STATE_CONFIGURE:
+ Log.w(TAG, "StopPreview called when preview not active!");
+ return;
+ case STATE_RELEASED:
+ throw new RuntimeException("stopPreview called on released EffectsRecorder!");
+ default:
+ break;
+ }
+
+ if (mState == STATE_RECORD) {
+ stopRecording();
+ }
+
+ sendMessage(mCurrentEffect, EFFECT_MSG_STOPPING_EFFECT);
+
+ mCurrentEffect = EFFECT_NONE;
+
+ mState = STATE_CONFIGURE;
+ mRunner.stop();
+
+ // Rest of stop and release handled in mRunnerDoneCallback
+ }
+
+ // Try to enable/disable 3A locks if supported; otherwise return false
+ boolean tryEnable3ALocks(boolean toggle) {
+ Camera.Parameters params = mCameraDevice.getParameters();
+ if (params.isAutoExposureLockSupported() &&
+ params.isAutoWhiteBalanceLockSupported() ) {
+ params.setAutoExposureLock(toggle);
+ params.setAutoWhiteBalanceLock(toggle);
+ mCameraDevice.setParameters(params);
+ return true;
+ }
+ return false;
+ }
+
+ // Try to enable/disable 3A locks if supported; otherwise, throw error
+ // Use this when locks are essential to success
+ void enable3ALocks(boolean toggle) {
+ Camera.Parameters params = mCameraDevice.getParameters();
+ if (!tryEnable3ALocks(toggle)) {
+ throw new RuntimeException("Attempt to lock 3A on camera with no locking support!");
+ }
+ }
+
+ private OnRunnerDoneListener mRunnerDoneCallback =
+ new OnRunnerDoneListener() {
+ public void onRunnerDone(int result) {
+ synchronized(EffectsRecorder.this) {
+ if (mState == STATE_PREVIEW) {
+ // Switching effects, start up the new runner
+ if (mLogVerbose) Log.v(TAG, "Previous effect halted, starting new effect.");
+ tryEnable3ALocks(false);
+ mRunner.run();
+ } else if (mState != STATE_RELEASED) {
+ // Shutting down effects
+ if (mLogVerbose) Log.v(TAG, "Runner halted, restoring direct preview");
+ mCameraDevice.stopPreview();
+ try {
+ mCameraDevice.setPreviewDisplay(mPreviewSurfaceHolder);
+ } catch(IOException e) {
+ throw new RuntimeException("Unable to connect camera to preview display", e);
+ }
+ mCameraDevice.startPreview();
+ } else {
+ // STATE_RELEASED - camera will be/has been released as well, do nothing.
+ }
+ }
+ }
+ };
+
+ // Indicates that all camera/recording activity needs to halt
+ public synchronized void release() {
+ if (mLogVerbose) Log.v(TAG, "Releasing (" + this + ")");
+
+ switch (mState) {
+ case STATE_RECORD:
+ case STATE_PREVIEW:
+ stopPreview();
+ mCameraDevice.stopPreview();
+ // Fall-through
+ default:
+ mState = STATE_RELEASED;
+ break;
+ }
+ }
+
+ private void sendMessage(final int effect, final int msg) {
+ if (mEffectsListener != null) {
+ mHandler.post(new Runnable() {
+ public void run() {
+ mEffectsListener.onEffectsUpdate(effect,
+ msg);
+ }
+ });
+ }
+ }
+}
\ No newline at end of file
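
A note on the EffectsListener contract defined above: sendMessage() posts callbacks to the main looper, so onEffectsUpdate() may touch the UI directly. As a hedged sketch of how a client might surface the backdropper learning state (hintView is a placeholder view; the VideoCamera changes below do the equivalent with the bg_replace_message TextView):

    effects.setEffectsListener(new EffectsRecorder.EffectsListener() {
        public void onEffectsUpdate(int effectId, int effectMsg) {
            if (effectId != EffectsRecorder.EFFECT_BACKDROPPER) return;
            switch (effectMsg) {
                case EffectsRecorder.EFFECT_MSG_STARTED_LEARNING:
                    hintView.setVisibility(View.VISIBLE);  // ask the user to step out of frame
                    break;
                case EffectsRecorder.EFFECT_MSG_DONE_LEARNING:
                case EffectsRecorder.EFFECT_MSG_STOPPING_EFFECT:
                    hintView.setVisibility(View.GONE);
                    break;
            }
        }
    });
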
diff --git a/src/com/android/camera/VideoCamera.java b/src/com/android/camera/VideoCamera.java
index da25f9d5..c30417d6 100644
--- a/src/com/android/camera/VideoCamera.java
+++ b/src/com/android/camera/VideoCamera.java
@@ -84,7 +84,8 @@ public class VideoCamera extends ActivityBase
implements CameraPreference.OnPreferenceChangedListener,
ShutterButton.OnShutterButtonListener, SurfaceHolder.Callback,
MediaRecorder.OnErrorListener, MediaRecorder.OnInfoListener,
- ModePicker.OnModeChangeListener, View.OnTouchListener {
+ ModePicker.OnModeChangeListener, View.OnTouchListener,
+ EffectsRecorder.EffectsListener {
private static final String TAG = "videocamera";
@@ -133,6 +134,9 @@ public class VideoCamera extends ActivityBase
private boolean mSnapshotInProgress = false;
private PictureCallback mJpegPictureCallback;
+ private final static String EFFECT_BG_FROM_GALLERY =
+ "gallery";
+
private android.hardware.Camera mCameraDevice;
private final CameraErrorCallback mErrorCallback = new CameraErrorCallback();
@@ -142,6 +146,8 @@ public class VideoCamera extends ActivityBase
private PreviewFrameLayout mPreviewFrameLayout;
private SurfaceHolder mSurfaceHolder = null;
private IndicatorControlContainer mIndicatorControlContainer;
+ private int mSurfaceWidth;
+ private int mSurfaceHeight;
private View mReviewControl;
private Toast mNoShareToast;
@@ -158,6 +164,7 @@ public class VideoCamera extends ActivityBase
private ModePicker mModePicker;
private ShutterButton mShutterButton;
private TextView mRecordingTimeView;
+ private TextView mBgLearningMessage;
private boolean mIsVideoCaptureIntent;
private boolean mQuickCapture;
@@ -168,6 +175,12 @@ public class VideoCamera extends ActivityBase
private long mStorageSpace;
private MediaRecorder mMediaRecorder;
+ private EffectsRecorder mEffectsRecorder;
+
+ private int mEffectType = EffectsRecorder.EFFECT_NONE;
+ private Object mEffectParameter = null;
+ private String mEffectUriFromGallery = null;
+
private boolean mMediaRecorderRecording = false;
private long mRecordingStartTime;
private boolean mRecordingTimeCountsDown = false;
@@ -411,6 +424,8 @@ public class VideoCamera extends ActivityBase
mTimeLapseLabel = findViewById(R.id.time_lapse_label);
mPreviewBorder = findViewById(R.id.preview_border);
+ mBgLearningMessage = (TextView) findViewById(R.id.bg_replace_message);
+
// Make sure preview is started.
try {
startPreviewThread.join();
@@ -671,6 +686,28 @@ public class VideoCamera extends ActivityBase
int profileQuality = getProfileQuality(mCameraId, quality, mCaptureTimeLapse);
mProfile = CamcorderProfile.get(mCameraId, profileQuality);
getDesiredPreviewSize();
+
+ // Set effect
+ mEffectType = CameraSettings.readEffectType(mPreferences);
+ if (mEffectType != EffectsRecorder.EFFECT_NONE) {
+ mEffectParameter = CameraSettings.readEffectParameter(mPreferences);
+ // When picking from gallery, mEffectParameter should have been
+ // initialized in onActivityResult. If not, fall back to no effect
+ if (mEffectType == EffectsRecorder.EFFECT_BACKDROPPER &&
+ ((String)mEffectParameter).equals(EFFECT_BG_FROM_GALLERY)) {
+ if (mEffectUriFromGallery == null) {
+ Log.w(TAG, "No URI from gallery, resetting to no effect");
+ mEffectType = EffectsRecorder.EFFECT_NONE;
+ mEffectParameter = null;
+ ComboPreferences.Editor editor = mPreferences.edit();
+ editor.putString(CameraSettings.KEY_VIDEO_EFFECT, "none");
+ editor.apply();
+ }
+ }
+ } else {
+ mEffectParameter = null;
+ }
+
}
int getProfileQuality(int cameraId, String quality, boolean captureTimeLapse) {
@@ -812,7 +849,14 @@ public class VideoCamera extends ActivityBase
private void setPreviewDisplay(SurfaceHolder holder) {
try {
- mCameraDevice.setPreviewDisplay(holder);
+ if (effectsActive() && mPreviewing) {
+ mEffectsRecorder.setPreviewDisplay(
+ mSurfaceHolder,
+ mSurfaceWidth,
+ mSurfaceHeight);
+ } else {
+ mCameraDevice.setPreviewDisplay(holder);
+ }
} catch (Throwable ex) {
closeCamera();
throw new RuntimeException("setPreviewDisplay failed", ex);
@@ -821,24 +865,36 @@ public class VideoCamera extends ActivityBase
private void startPreview() {
Log.v(TAG, "startPreview");
- mCameraDevice.setErrorCallback(mErrorCallback);
+ mCameraDevice.setErrorCallback(mErrorCallback);
if (mPreviewing == true) {
mCameraDevice.stopPreview();
+ if (effectsActive() && mEffectsRecorder != null) {
+ mEffectsRecorder.release();
+ }
mPreviewing = false;
}
- setPreviewDisplay(mSurfaceHolder);
- mDisplayRotation = Util.getDisplayRotation(this);
- int orientation = Util.getDisplayOrientation(mDisplayRotation, mCameraId);
- mCameraDevice.setDisplayOrientation(orientation);
- setCameraParameters();
+ if (!effectsActive()) {
+ setPreviewDisplay(mSurfaceHolder);
+ mDisplayRotation = Util.getDisplayRotation(this);
+ int orientation = Util.getDisplayOrientation(mDisplayRotation, mCameraId);
+ mCameraDevice.setDisplayOrientation(orientation);
+ setCameraParameters();
- try {
- mCameraDevice.startPreview();
- } catch (Throwable ex) {
- closeCamera();
- throw new RuntimeException("startPreview failed", ex);
+ try {
+ mCameraDevice.startPreview();
+ } catch (Throwable ex) {
+ closeCamera();
+ throw new RuntimeException("startPreview failed", ex);
+ }
+ } else {
+ setCameraParameters();
+
+ initializeEffectsPreview();
+ Log.v(TAG, "effectsStartPreview");
+ mEffectsRecorder.startPreview();
}
+
mZoomState = ZOOM_STOPPED;
mPreviewing = true;
}
@@ -849,6 +905,10 @@ public class VideoCamera extends ActivityBase
Log.d(TAG, "already stopped.");
return;
}
+ if (mEffectsRecorder != null) {
+ mEffectsRecorder.release();
+ }
+ mEffectType = EffectsRecorder.EFFECT_NONE;
CameraHolder.instance().release();
mCameraDevice = null;
mPreviewing = false;
@@ -968,6 +1028,8 @@ public class VideoCamera extends ActivityBase
Log.v(TAG, "surfaceChanged. w=" + w + ". h=" + h);
mSurfaceHolder = holder;
+ mSurfaceWidth = w;
+ mSurfaceHeight = h;
if (mPausing) {
// We're pausing, the screen is off and we already stopped
@@ -1150,6 +1212,73 @@ public class VideoCamera extends ActivityBase
mMediaRecorder.setOnInfoListener(this);
}
+ private void initializeEffectsPreview() {
+ Log.v(TAG, "initializeEffectsPreview");
+ // If the mCameraDevice is null, then this activity is going to finish
+ if (mCameraDevice == null) return;
+
+ mEffectsRecorder = new EffectsRecorder(this);
+
+ mEffectsRecorder.setCamera(mCameraDevice);
+ mEffectsRecorder.setProfile(mProfile);
+ mEffectsRecorder.setEffectsListener(this);
+ mEffectsRecorder.setOnInfoListener(this);
+ mEffectsRecorder.setOnErrorListener(this);
+
+ // See android.hardware.Camera.Parameters.setRotation for
+ // documentation.
+ int rotation = 0;
+ if (mOrientation != OrientationEventListener.ORIENTATION_UNKNOWN) {
+ CameraInfo info = CameraHolder.instance().getCameraInfo()[mCameraId];
+ if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
+ rotation = (info.orientation - mOrientation + 360) % 360;
+ } else { // back-facing camera
+ rotation = (info.orientation + mOrientation) % 360;
+ }
+ }
+ mEffectsRecorder.setOrientationHint(rotation);
+ mOrientationHint = rotation;
+
+ mEffectsRecorder.setPreviewDisplay(
+ mSurfaceHolder,
+ mSurfaceWidth,
+ mSurfaceHeight);
+
+ if (mEffectType == EffectsRecorder.EFFECT_BACKDROPPER &&
+ ((String)mEffectParameter).equals(EFFECT_BG_FROM_GALLERY) ) {
+ mEffectsRecorder.setEffect(mEffectType, mEffectUriFromGallery);
+ } else {
+ mEffectsRecorder.setEffect(mEffectType, mEffectParameter);
+ }
+ }
+
+ private void initializeEffectsRecording() {
+ Log.v(TAG, "initializeEffectsRecording");
+
+ Intent intent = getIntent();
+ Bundle myExtras = intent.getExtras();
+
+ if (mIsVideoCaptureIntent && myExtras != null) {
+ Uri saveUri = (Uri) myExtras.getParcelable(MediaStore.EXTRA_OUTPUT);
+ if (saveUri != null) {
+ mVideoFilename = saveUri.toString();
+ } else {
+ mVideoFilename = null;
+ }
+ } else {
+ mVideoFilename = null;
+ }
+
+ // TODO: Timelapse
+
+ // Set output file
+ if (mVideoFilename == null) {
+ generateVideoFilename(mProfile.fileFormat);
+ }
+ mEffectsRecorder.setOutputFile(mVideoFilename);
+ }
+
+
private void releaseMediaRecorder() {
Log.v(TAG, "Releasing media recorder.");
if (mMediaRecorder != null) {
@@ -1169,6 +1298,16 @@ public class VideoCamera extends ActivityBase
}
}
+ private void releaseEffectsRecorder() {
+ Log.v(TAG, "Releasing effects recorder.");
+ if (mEffectsRecorder != null) {
+ cleanupEmptyFile();
+ mEffectsRecorder.release();
+ mEffectsRecorder = null;
+ }
+ mVideoFilename = null;
+ }
+
private void generateVideoFilename(int outputFileFormat) {
long dateTaken = System.currentTimeMillis();
String title = createName(dateTaken);
@@ -1342,23 +1481,42 @@ public class VideoCamera extends ActivityBase
return;
}
- initializeRecorder();
- if (mMediaRecorder == null) {
- Log.e(TAG, "Fail to initialize media recorder");
- return;
+ if (effectsActive()) {
+ initializeEffectsRecording();
+ if (mEffectsRecorder == null) {
+ Log.e(TAG, "Fail to initialize effect recorder");
+ return;
+ }
+ } else {
+ initializeRecorder();
+ if (mMediaRecorder == null) {
+ Log.e(TAG, "Fail to initialize media recorder");
+ return;
+ }
}
pauseAudioPlayback();
- try {
- mMediaRecorder.start(); // Recording is now started
- } catch (RuntimeException e) {
- Log.e(TAG, "Could not start media recorder. ", e);
- releaseMediaRecorder();
- // If start fails, frameworks will not lock the camera for us.
- mCameraDevice.lock();
- return;
+ if (effectsActive()) {
+ try {
+ mEffectsRecorder.startRecording();
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Could not start effects recorder. ", e);
+ releaseEffectsRecorder();
+ return;
+ }
+ } else {
+ try {
+ mMediaRecorder.start(); // Recording is now started
+ } catch (RuntimeException e) {
+ Log.e(TAG, "Could not start media recorder. ", e);
+ releaseMediaRecorder();
+ // If start fails, frameworks will not lock the camera for us.
+ mCameraDevice.lock();
+ return;
+ }
}
+
enableCameraControls(false);
mMediaRecorderRecording = true;
@@ -1458,10 +1616,15 @@ public class VideoCamera extends ActivityBase
Log.v(TAG, "stopVideoRecording");
if (mMediaRecorderRecording) {
boolean shouldAddToMediaStore = false;
- mMediaRecorder.setOnErrorListener(null);
- mMediaRecorder.setOnInfoListener(null);
+
try {
- mMediaRecorder.stop();
+ if (effectsActive()) {
+ mEffectsRecorder.stopRecording();
+ } else {
+ mMediaRecorder.setOnErrorListener(null);
+ mMediaRecorder.setOnInfoListener(null);
+ mMediaRecorder.stop();
+ }
mCurrentVideoFilename = mVideoFilename;
Log.v(TAG, "Setting current video filename: "
+ mCurrentVideoFilename);
@@ -1470,6 +1633,7 @@ public class VideoCamera extends ActivityBase
Log.e(TAG, "stop fail", e);
if (mVideoFilename != null) deleteVideoFile(mVideoFilename);
}
+
mMediaRecorderRecording = false;
showRecordingUI(false);
if (!mIsVideoCaptureIntent) {
@@ -1480,7 +1644,10 @@ public class VideoCamera extends ActivityBase
addVideoToMediaStore();
}
}
- releaseMediaRecorder(); // always release media recorder
+ // always release media recorder
+ if (!effectsActive()) {
+ releaseMediaRecorder();
+ }
}
private void resetScreenOn() {
@@ -1696,6 +1863,37 @@ public class VideoCamera extends ActivityBase
}
@Override
+ public void onActivityResult(int requestCode, int resultCode, Intent data) {
+ switch (requestCode) {
+ case EffectsRecorder.EFFECT_BACKDROPPER:
+ if (resultCode == RESULT_OK) {
+ // onActivityResult() runs before onResume(), so this parameter will be
+ // seen by startPreview from onResume()
+ mEffectUriFromGallery = ((Uri)data.getData()).toString();
+ Log.v(TAG, "Received URI from gallery: " + mEffectUriFromGallery);
+ }
+ break;
+ default:
+ Log.e(TAG, "Unknown activity result sent to Camera!");
+ break;
+ }
+ }
+
+ public void onEffectsUpdate(int effectId, int effectMsg) {
+ if (effectId == EffectsRecorder.EFFECT_BACKDROPPER) {
+ switch (effectMsg) {
+ case EffectsRecorder.EFFECT_MSG_STARTED_LEARNING:
+ mBgLearningMessage.setVisibility(View.VISIBLE);
+ break;
+ case EffectsRecorder.EFFECT_MSG_DONE_LEARNING:
+ case EffectsRecorder.EFFECT_MSG_STOPPING_EFFECT:
+ mBgLearningMessage.setVisibility(View.GONE);
+ break;
+ }
+ }
+ }
+
+ @Override
public void onConfigurationChanged(Configuration config) {
super.onConfigurationChanged(config);
}
@@ -1732,6 +1930,10 @@ public class VideoCamera extends ActivityBase
}
}
+ private boolean effectsActive() {
+ return (mEffectType != EffectsRecorder.EFFECT_NONE);
+ }
+
public void onSharedPreferenceChanged() {
// ignore the events after "onPause()" or preview has not started yet
if (mPausing) return;
@@ -1740,7 +1942,8 @@ public class VideoCamera extends ActivityBase
// startPreview().
if (mCameraDevice == null) return;
- // TODO: apply goofy face effect here.
+ // Check if the current effects selection has changed
+ if (updateEffectSelection()) return;
// Check if camera id is changed.
int cameraId = CameraSettings.readPreferredCameraId(mPreferences);
@@ -1761,7 +1964,11 @@ public class VideoCamera extends ActivityBase
Size size = mParameters.getPreviewSize();
if (size.width != mDesiredPreviewWidth
|| size.height != mDesiredPreviewHeight) {
- mCameraDevice.stopPreview();
+ if (!effectsActive()) {
+ mCameraDevice.stopPreview();
+ } else {
+ mEffectsRecorder.release();
+ }
resizeForPreviewAspectRatio();
startPreview(); // Parameters will be set in startPreview().
} else {
@@ -1772,6 +1979,42 @@ public class VideoCamera extends ActivityBase
}
}
+ private boolean updateEffectSelection() {
+ int currentEffectType = mEffectType;
+ Object currentEffectParameter = mEffectParameter;
+ mEffectType = CameraSettings.readEffectType(mPreferences);
+ mEffectParameter = CameraSettings.readEffectParameter(mPreferences);
+
+ if (mEffectType == currentEffectType) {
+ if (mEffectType == EffectsRecorder.EFFECT_NONE) return false;
+ if (mEffectParameter.equals(currentEffectParameter)) return false;
+ }
+ Log.v(TAG, "New effect selection: " + mPreferences.getString(CameraSettings.KEY_VIDEO_EFFECT, "none") );
+
+ if ( mEffectType == EffectsRecorder.EFFECT_NONE ) {
+ // Stop effects and return to normal preview
+ mEffectsRecorder.stopPreview();
+ return true;
+ }
+ if (mEffectType == EffectsRecorder.EFFECT_BACKDROPPER &&
+ ((String)mEffectParameter).equals(EFFECT_BG_FROM_GALLERY)) {
+ // Request video from gallery to use for background
+ Intent i = new Intent(Intent.ACTION_PICK);
+ i.setDataAndType(Video.Media.EXTERNAL_CONTENT_URI,
+ "video/*");
+ startActivityForResult(i, EffectsRecorder.EFFECT_BACKDROPPER);
+ return true;
+ }
+ if (currentEffectType == EffectsRecorder.EFFECT_NONE) {
+ // Start up effects
+ startPreview();
+ } else {
+ // Switch currently running effect
+ mEffectsRecorder.setEffect(mEffectType, mEffectParameter);
+ }
+ return true;
+ }
+
private void showTimeLapseUI(boolean enable) {
if (mTimeLapseLabel != null) {
mTimeLapseLabel.setVisibility(enable ? View.VISIBLE : View.GONE);