/* * Copyright (C) 2009 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.camera.util; import android.app.Activity; import android.app.AlertDialog; import android.app.admin.DevicePolicyManager; import android.content.ActivityNotFoundException; import android.content.ComponentName; import android.content.ContentResolver; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.SharedPreferences; import android.graphics.Bitmap; import android.graphics.BitmapFactory; import android.graphics.Matrix; import android.graphics.Point; import android.graphics.Rect; import android.graphics.RectF; import android.hardware.Camera; import android.hardware.Camera.CameraInfo; import android.hardware.Camera.Parameters; import android.hardware.Camera.Size; import android.hardware.camera2.CameraCharacteristics; import android.location.Location; import android.media.MediaRecorder; import android.net.Uri; import android.os.Handler; import android.os.ParcelFileDescriptor; import android.telephony.TelephonyManager; import android.text.TextUtils; import android.util.DisplayMetrics; import android.util.Log; import android.util.TypedValue; import android.view.Display; import android.view.OrientationEventListener; import android.view.Surface; import android.view.View; import android.view.WindowManager; import android.view.animation.AlphaAnimation; import android.view.animation.Animation; import android.widget.Toast; import android.os.SystemProperties; import com.android.camera.CameraActivity; import com.android.camera.CameraDisabledException; import com.android.camera.CameraHolder; import com.android.camera.CameraManager; import com.android.camera.CameraSettings; import com.android.camera.ui.RotateTextToast; import com.android.camera.util.IntentHelper; import org.codeaurora.snapcam.R; import java.io.Closeable; import java.io.IOException; import java.lang.reflect.Method; import java.text.SimpleDateFormat; import java.util.Arrays; import java.util.Comparator; import java.util.Date; import java.util.List; import java.util.ArrayList; import java.util.Locale; import android.util.Range; import java.util.StringTokenizer; import java.lang.reflect.Constructor; import java.lang.reflect.Modifier; import com.android.camera.SettingsManager; import android.hardware.camera2.CameraAccessException; import android.hardware.camera2.CameraCaptureSession; import android.hardware.camera2.params.StreamConfigurationMap; import android.hardware.camera2.CaptureRequest; import android.hardware.camera2.impl.CameraMetadataNative; import android.hardware.camera2.utils.SurfaceUtils; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.Set; import java.util.TreeSet; import static android.content.Context.MODE_PRIVATE; /** * Collection of utility functions used in this package. 
*/ public class CameraUtil { private static final String TAG = "Util"; // For calculate the best fps range for still image capture. private final static int MAX_PREVIEW_FPS_TIMES_1000 = 400000; private final static int PREFERRED_PREVIEW_FPS_TIMES_1000 = 30000; // For creating crop intents. public static final String KEY_RETURN_DATA = "return-data"; public static final String KEY_SHOW_WHEN_LOCKED = "showWhenLocked"; // Orientation hysteresis amount used in rounding, in degrees public static final int ORIENTATION_HYSTERESIS = 10; public static final String REVIEW_ACTION = "com.android.camera.action.REVIEW"; // See android.hardware.Camera.ACTION_NEW_PICTURE. public static final String ACTION_NEW_PICTURE = "android.hardware.action.NEW_PICTURE"; // See android.hardware.Camera.ACTION_NEW_VIDEO. public static final String ACTION_NEW_VIDEO = "android.hardware.action.NEW_VIDEO"; // Broadcast Action: The camera application has become active in picture-taking mode. public static final String ACTION_CAMERA_STARTED = "com.android.camera.action.CAMERA_STARTED"; // Broadcast Action: The camera application is no longer in active picture-taking mode. public static final String ACTION_CAMERA_STOPPED = "com.android.camera.action.CAMERA_STOPPED"; // When the camera application is active in picture-taking mode, it listens for this intent, // which upon receipt will trigger the shutter to capture a new picture, as if the user had // pressed the shutter button. public static final String ACTION_CAMERA_SHUTTER_CLICK = "com.android.camera.action.SHUTTER_CLICK"; public static final String INTENT_ACTION_STILL_IMAGE_CAMERA_SECURE = "android.media.action.STILL_IMAGE_CAMERA_SECURE"; public static final String ACTION_IMAGE_CAPTURE_SECURE = "android.media.action.IMAGE_CAPTURE_SECURE"; public static final String SECURE_CAMERA_EXTRA = "secure_camera"; // Fields from android.hardware.Camera.Parameters public static final String FOCUS_MODE_CONTINUOUS_PICTURE = "continuous-picture"; public static final String FOCUS_MODE_MW_CONTINUOUS_PICTURE = "mw_continuous-picture"; public static final String RECORDING_HINT = "recording-hint"; private static final String AUTO_EXPOSURE_LOCK_SUPPORTED = "auto-exposure-lock-supported"; private static final String AUTO_WHITE_BALANCE_LOCK_SUPPORTED = "auto-whitebalance-lock-supported"; private static final String VIDEO_SNAPSHOT_SUPPORTED = "video-snapshot-supported"; private static final String AUTO_HDR_SUPPORTED = "auto-hdr-supported"; public static final String SCENE_MODE_HDR = "hdr"; public static final String TRUE = "true"; public static final String FALSE = "false"; // Hardware camera key mask private static final int KEY_MASK_CAMERA = 0x20; private static final Class[] CTOR_SIGNATURE = new Class[] {CaptureRequest.class, CameraMetadataNative.class, boolean.class, int.class}; // Fields for the show-on-maps-functionality private static final String MAPS_PACKAGE_NAME = "com.google.android.apps.maps"; private static final String MAPS_CLASS_NAME = "com.google.android.maps.MapsActivity"; /** Has to be in sync with the receiving MovieActivity. 
*/ public static final String KEY_TREAT_UP_AS_BACK = "treat-up-as-back"; /** Judge the value whether is from lockscreen come in or not */ public static final String KEY_IS_SECURE_CAMERA = "is_secure_camera"; public static final int RATIO_UNKNOWN = 0; public static final int RATIO_16_9 = 1; public static final int RATIO_4_3 = 2; public static final int RATIO_3_2 = 3; public static final int MODE_TWO_BT = 1; public static final int MODE_ONE_BT = 0; private static final String DIALOG_CONFIG = "dialog_config"; public static final String KEY_SAVE = "save"; public static final String KEY_DELETE = "delete"; public static final String KEY_DELETE_ALL = "delete_all"; public static boolean isSupported(String value, List supported) { return supported == null ? false : supported.indexOf(value) >= 0; } public static boolean isAutoExposureLockSupported(Parameters params) { return TRUE.equals(params.get(AUTO_EXPOSURE_LOCK_SUPPORTED)); } public static boolean isAutoHDRSupported(Parameters params) { return TRUE.equals(params.get(AUTO_HDR_SUPPORTED)); } public static boolean isAutoWhiteBalanceLockSupported(Parameters params) { return TRUE.equals(params.get(AUTO_WHITE_BALANCE_LOCK_SUPPORTED)); } public static boolean isVideoSnapshotSupported(Parameters params) { if (params == null) return false; return TRUE.equals(params.get(VIDEO_SNAPSHOT_SUPPORTED)); } public static boolean isCameraHdrSupported(Parameters params) { List supported = params.getSupportedSceneModes(); return (supported != null) && supported.contains(SCENE_MODE_HDR); } public static boolean hasCameraKey() { return (sDeviceKeysPresent & KEY_MASK_CAMERA) != 0; } public static boolean isMeteringAreaSupported(Parameters params) { return params.getMaxNumMeteringAreas() > 0; } public static boolean isFocusAreaSupported(Parameters params) { return (params.getMaxNumFocusAreas() > 0 && isSupported(Parameters.FOCUS_MODE_AUTO, params.getSupportedFocusModes())); } public static boolean isSupported(Parameters params, String key) { return (params.get(key) != null && !"null".equals(params.get(key))); } public static int getNumSnapsPerShutter(Parameters params) { String numJpegs = params.get("num-jpegs-per-shutter"); if (!TextUtils.isEmpty(numJpegs)) { return Integer.valueOf(numJpegs); } String numSnaps = params.get("num-snaps-per-shutter"); if (!TextUtils.isEmpty(numSnaps)) { return Integer.valueOf(numSnaps); } return 1; } // Private intent extras. Test only. private static final String EXTRAS_CAMERA_FACING = "android.intent.extras.CAMERA_FACING"; private static float sPixelDensity = 1; private static ImageFileNamer sImageFileNamer; // Get available hardware keys private static int sDeviceKeysPresent; private CameraUtil() { } public static void initialize(Context context) { DisplayMetrics metrics = new DisplayMetrics(); WindowManager wm = (WindowManager) context.getSystemService(Context.WINDOW_SERVICE); wm.getDefaultDisplay().getMetrics(metrics); sPixelDensity = metrics.density; sImageFileNamer = new ImageFileNamer( context.getString(R.string.image_file_name_format)); sDeviceKeysPresent = context.getResources().getInteger( org.lineageos.platform.internal.R.integer.config_deviceHardwareKeys); } public static int dpToPixel(int dp) { return Math.round(sPixelDensity * dp); } // Rotates the bitmap by the specified degree. // If a new bitmap is created, the original bitmap is recycled. public static Bitmap rotate(Bitmap b, int degrees) { return rotateAndMirror(b, degrees, false); } // Rotates and/or mirrors the bitmap. 
If a new bitmap is created, the // original bitmap is recycled. public static Bitmap rotateAndMirror(Bitmap b, int degrees, boolean mirror) { if ((degrees != 0 || mirror) && b != null) { Matrix m = new Matrix(); // Mirror first. // horizontal flip + rotation = -rotation + horizontal flip if (mirror) { m.postScale(-1, 1); degrees = (degrees + 360) % 360; if (degrees == 0 || degrees == 180) { m.postTranslate(b.getWidth(), 0); } else if (degrees == 90 || degrees == 270) { m.postTranslate(b.getHeight(), 0); } else { throw new IllegalArgumentException("Invalid degrees=" + degrees); } } if (degrees != 0) { // clockwise m.postRotate(degrees, (float) b.getWidth() / 2, (float) b.getHeight() / 2); } try { Bitmap b2 = Bitmap.createBitmap( b, 0, 0, b.getWidth(), b.getHeight(), m, true); if (b != b2) { b.recycle(); b = b2; } } catch (OutOfMemoryError ex) { // We have no memory to rotate. Return the original bitmap. } } return b; } /* * Compute the sample size as a function of minSideLength * and maxNumOfPixels. * minSideLength is used to specify that minimal width or height of a * bitmap. * maxNumOfPixels is used to specify the maximal size in pixels that is * tolerable in terms of memory usage. * * The function returns a sample size based on the constraints. * Both size and minSideLength can be passed in as -1 * which indicates no care of the corresponding constraint. * The functions prefers returning a sample size that * generates a smaller bitmap, unless minSideLength = -1. * * Also, the function rounds up the sample size to a power of 2 or multiple * of 8 because BitmapFactory only honors sample size this way. * For example, BitmapFactory downsamples an image by 2 even though the * request is 3. So we round up the sample size to avoid OOM. */ public static int computeSampleSize(BitmapFactory.Options options, int minSideLength, int maxNumOfPixels) { int initialSize = computeInitialSampleSize(options, minSideLength, maxNumOfPixels); int roundedSize; if (initialSize <= 8) { roundedSize = 1; while (roundedSize < initialSize) { roundedSize <<= 1; } } else { roundedSize = (initialSize + 7) / 8 * 8; } return roundedSize; } private static int computeInitialSampleSize(BitmapFactory.Options options, int minSideLength, int maxNumOfPixels) { double w = options.outWidth; double h = options.outHeight; int lowerBound = (maxNumOfPixels < 0) ? 1 : (int) Math.ceil(Math.sqrt(w * h / maxNumOfPixels)); int upperBound = (minSideLength < 0) ? 128 : (int) Math.min(Math.floor(w / minSideLength), Math.floor(h / minSideLength)); if (upperBound < lowerBound) { // return the larger one when there is no overlapping zone. 
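            /*
             * Worked example (illustrative, not part of the original code) of the
             * rounding described above, for a hypothetical 4000x3000 JPEG with a
             * 2,000,000-pixel budget and no minSideLength constraint:
             *
             *   BitmapFactory.Options opts = new BitmapFactory.Options();
             *   opts.outWidth = 4000;
             *   opts.outHeight = 3000;
             *   // initial size = ceil(sqrt(4000 * 3000 / 2000000)) = 3, which is
             *   // <= 8, so it is rounded up to the next power of two:
             *   int sample = CameraUtil.computeSampleSize(opts, -1, 2000000);   // == 4
             *
             * Initial sizes above 8 are instead rounded up to the next multiple of 8.
             */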
return lowerBound; } if (maxNumOfPixels < 0 && minSideLength < 0) { return 1; } else if (minSideLength < 0) { return lowerBound; } else { return upperBound; } } public static Bitmap makeBitmap(byte[] jpegData, int maxNumOfPixels) { try { BitmapFactory.Options options = new BitmapFactory.Options(); options.inJustDecodeBounds = true; BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length, options); if (options.mCancel || options.outWidth == -1 || options.outHeight == -1) { return null; } options.inSampleSize = computeSampleSize( options, -1, maxNumOfPixels); options.inJustDecodeBounds = false; options.inDither = false; options.inPreferredConfig = Bitmap.Config.ARGB_8888; return BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length, options); } catch (OutOfMemoryError ex) { Log.e(TAG, "Got oom exception ", ex); return null; } } public static void closeSilently(Closeable c) { if (c == null) return; try { c.close(); } catch (Throwable t) { // do nothing } } public static void Assert(boolean cond) { if (!cond) { throw new AssertionError(); } } private static void throwIfCameraDisabled(Activity activity) throws CameraDisabledException { // Check if device policy has disabled the camera. DevicePolicyManager dpm = (DevicePolicyManager) activity.getSystemService( Context.DEVICE_POLICY_SERVICE); if (dpm.getCameraDisabled(null)) { throw new CameraDisabledException(); } } public static boolean isCamera2Supported(Context context) { android.hardware.camera2.CameraManager manager = (android.hardware.camera2.CameraManager) context.getSystemService(Context.CAMERA_SERVICE); try { String[] cameraIds = manager.getCameraIdList(); if (cameraIds != null && cameraIds.length > 0) { CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraIds[0]); int deviceLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); switch (deviceLevel) { case CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED: return true; case CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL: return true; case CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3: return true; default: return false; } } } catch(CameraAccessException | NumberFormatException e) { Log.e(TAG, "exception trying to get camera characteristics"); } return false; } public static CameraManager.CameraProxy openCamera( Activity activity, final int cameraId, Handler handler, final CameraManager.CameraOpenErrorCallback cb) { try { throwIfCameraDisabled(activity); return CameraHolder.instance().open(handler, cameraId, cb); } catch (CameraDisabledException ex) { handler.post(new Runnable() { @Override public void run() { cb.onCameraDisabled(cameraId); } }); } return null; } public static void showErrorAndFinish(final Activity activity, int msgId) { if (activity == null || activity.isFinishing()) return; DialogInterface.OnClickListener buttonListener = new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { activity.finish(); } }; TypedValue out = new TypedValue(); activity.getTheme().resolveAttribute(android.R.attr.alertDialogIcon, out, true); new AlertDialog.Builder(activity) .setCancelable(false) .setTitle(R.string.camera_error_title) .setMessage(msgId) .setNeutralButton(R.string.dialog_ok, buttonListener) .setIcon(out.resourceId) .show(); } public static T checkNotNull(T object) { if (object == null) throw new NullPointerException(); return object; } public static boolean equals(Object a, Object b) { return (a == b) || (a == null ? 
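            /*
             * Illustrative sketch (not from the original file): typical use of
             * makeBitmap() above to decode a captured JPEG into a small,
             * memory-bounded bitmap. The 50000-pixel budget is a made-up example
             * value; jpegData stands for the buffer handed to a picture callback.
             *
             *   Bitmap thumb = CameraUtil.makeBitmap(jpegData, 50000);
             *   if (thumb != null) {
             *       // Decoding was down-sampled via computeSampleSize(), so the
             *       // result stays near the requested pixel budget.
             *   }
             */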
false : a.equals(b)); } public static int nextPowerOf2(int n) { n -= 1; n |= n >>> 16; n |= n >>> 8; n |= n >>> 4; n |= n >>> 2; n |= n >>> 1; return n + 1; } public static float distance(float x, float y, float sx, float sy) { float dx = x - sx; float dy = y - sy; return (float) Math.sqrt(dx * dx + dy * dy); } public static int clamp(int x, int min, int max) { if (x > max) return max; if (x < min) return min; return x; } public static float clamp(float x, float min, float max) { if (x > max) return max; if (x < min) return min; return x; } public static int getDisplayRotation(Activity activity) { int rotation = activity.getWindowManager().getDefaultDisplay() .getRotation(); switch (rotation) { case Surface.ROTATION_0: return 0; case Surface.ROTATION_90: return 90; case Surface.ROTATION_180: return 180; case Surface.ROTATION_270: return 270; } return 0; } public static boolean isScreenRotated(Activity activity) { int rotation = activity.getWindowManager().getDefaultDisplay().getRotation(); return rotation != Surface.ROTATION_0 && rotation != Surface.ROTATION_180; } /** * Calculate the default orientation of the device based on the width and * height of the display when rotation = 0 (i.e. natural width and height) * @param activity the activity context * @return whether the default orientation of the device is portrait */ public static boolean isDefaultToPortrait(Activity activity) { Display currentDisplay = activity.getWindowManager().getDefaultDisplay(); Point displaySize = new Point(); currentDisplay.getSize(displaySize); int orientation = currentDisplay.getRotation(); int naturalWidth, naturalHeight; if (orientation == Surface.ROTATION_0 || orientation == Surface.ROTATION_180) { naturalWidth = displaySize.x; naturalHeight = displaySize.y; } else { naturalWidth = displaySize.y; naturalHeight = displaySize.x; } return naturalWidth < naturalHeight; } public static int getDisplayOrientation(int degrees, int cameraId) { // See android.hardware.Camera.setDisplayOrientation for // documentation. 
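        /*
         * Worked example (illustrative, not part of the original code): with a
         * back camera whose sensor is mounted at 90 degrees and the device in its
         * natural orientation (display rotation 0), the result is
         * (90 - 0 + 360) % 360 = 90. For a front camera with the same mounting,
         * (90 + 0) % 360 = 90 is then flipped to (360 - 90) % 360 = 270 to
         * compensate for the mirrored preview.
         *
         *   int degrees = CameraUtil.getDisplayRotation(activity);
         *   int rotation = CameraUtil.getDisplayOrientation(degrees, cameraId);
         *   camera.setDisplayOrientation(rotation);   // hypothetical open Camera
         */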
Camera.CameraInfo info = new Camera.CameraInfo(); Camera.getCameraInfo(cameraId, info); int result; if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { result = (info.orientation + degrees) % 360; result = (360 - result) % 360; // compensate the mirror } else { // back-facing result = (info.orientation - degrees + 360) % 360; } return result; } public static int getDisplayOrientationForCamera2(int degrees, int cameraId) { CameraCharacteristics info = CameraHolder.instance().getCameraCharacteristics(cameraId); int result; if (info.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) { result = (info.get(CameraCharacteristics.SENSOR_ORIENTATION) + degrees) % 360; result = (360 - result) % 360; // compensate the mirror } else { result = (info.get(CameraCharacteristics.SENSOR_ORIENTATION) - degrees + 360) % 360; } return result; } public static int getCameraOrientation(int cameraId) { Camera.CameraInfo info = new Camera.CameraInfo(); Camera.getCameraInfo(cameraId, info); return info.orientation; } public static int roundOrientation(int orientation, int orientationHistory) { boolean changeOrientation = false; if (orientationHistory == OrientationEventListener.ORIENTATION_UNKNOWN) { changeOrientation = true; } else { int dist = Math.abs(orientation - orientationHistory); dist = Math.min( dist, 360 - dist ); changeOrientation = ( dist >= 60 + ORIENTATION_HYSTERESIS ); } if (changeOrientation) { return ((orientation + 45) / 90 * 90) % 360; } return orientationHistory; } private static Point getDefaultDisplaySize(Activity activity, Point size) { activity.getWindowManager().getDefaultDisplay().getSize(size); //cap the display resolution given to getOptimalPreviewSize if the below properties //are set. For example if the properties are set as below : //adb shell setprop camera.display.umax 1920x1080 //adb shell setprop camera.display.lmax 1280x720 //Then, in devices having display panel size >1080p, panel size will be seen as 1080p. //If its 1080p or lesser (but >=720p), limit it to next allowed max which is 720p. //For < 720p, there is no need to do any capping. //By capping the panel size, we are indirectly controlling the preview size being //chosen in getOptimalPreviewSize(). String uMax = PersistUtil.getDisplayUMax(); String lMax = PersistUtil.getDisplayLMax(); if ((uMax.length() > 0) && (lMax.length() > 0)) { Log.v(TAG,"display uMax "+ uMax + " lMax " + lMax); String uMaxArr[] = uMax.split("x", 2); String lMaxArr[] = lMax.split("x", 2); try { int uMaxWidth = Integer.parseInt(uMaxArr[0]); int uMaxHeight = Integer.parseInt(uMaxArr[1]); int lMaxWidth = Integer.parseInt(lMaxArr[0]); int lMaxHeight = Integer.parseInt(lMaxArr[1]); int defaultDisplaySize = (size.x * size.y); if (defaultDisplaySize > (uMaxWidth*uMaxHeight)) { size.set(uMaxWidth,uMaxHeight); } else if (defaultDisplaySize >= (lMaxWidth*lMaxHeight)) { size.set(lMaxWidth,lMaxHeight); } else { Log.v(TAG,"No need to cap display size"); } } catch (Exception e) { Log.e(TAG,"Invalid display properties"); } } return size; } public static Size getOptimalPreviewSize(Activity currentActivity, List sizes, double targetRatio) { Point[] points = new Point[sizes.size()]; int index = 0; for (Size s : sizes) { points[index++] = new Point(s.width, s.height); } int optimalPickIndex = getOptimalPreviewSize(currentActivity, points, targetRatio); return (optimalPickIndex == -1) ? 
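        /*
         * Illustrative note (not part of the original code) on roundOrientation()
         * above: with ORIENTATION_HYSTERESIS = 10, the reported orientation only
         * snaps to a new 90-degree step once the raw sensor value is at least
         * 60 + 10 = 70 degrees away from the previous value, which keeps the UI
         * from flickering near the diagonals.
         *
         *   // hypothetical values fed from an OrientationEventListener
         *   int ui = CameraUtil.roundOrientation(50, 0);   // dist 50 < 70 -> stays 0
         *   ui = CameraUtil.roundOrientation(80, ui);      // dist 80 >= 70 -> snaps to 90
         */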
null : sizes.get(optimalPickIndex); } public static int getOptimalPreviewSize(Activity currentActivity, Point[] sizes, double targetRatio) { // TODO(andyhuibers): Don't hardcode this but use device's measurements. final int MAX_ASPECT_HEIGHT = 1440; // Use a very small tolerance because we want an exact match. final double ASPECT_TOLERANCE = 0.02; if (sizes == null) return -1; int optimalSizeIndex = -1; // Because of bugs of overlay and layout, we sometimes will try to // layout the viewfinder in the portrait orientation and thus get the // wrong size of preview surface. When we change the preview size, the // new overlay will be created before the old one closed, which causes // an exception. For now, just get the screen size. Point point = getDefaultDisplaySize(currentActivity, new Point()); final double ratio_4_3 = (double)4/3; int targetHeight = Math.min(point.x, point.y); double minDiff = targetHeight; // Try to find an size match aspect ratio and size for (int i = 0; i < sizes.length; i++) { Point size = sizes[i]; double ratio = (double) size.x / size.y; if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue; // Count sizes with height <= 1080p to mimic camera1 api behavior. if (size.y > MAX_ASPECT_HEIGHT) continue; if (ratio_4_3 == targetRatio) { if (size.y > minDiff) continue; } double heightDiff = Math.abs(size.y - targetHeight); if (heightDiff < minDiff) { optimalSizeIndex = i; minDiff = Math.abs(size.y - targetHeight); } else if (heightDiff == minDiff) { // Prefer resolutions smaller-than-display when an equally close // larger-than-display resolution is available if (size.y < targetHeight) { optimalSizeIndex = i; minDiff = heightDiff; } } } // Cannot find the one match the aspect ratio. This should not happen. // Ignore the requirement. if (optimalSizeIndex == -1) { Log.w(TAG, "No preview size match the aspect ratio"); minDiff = Double.MAX_VALUE; for (int i = 0; i < sizes.length; i++) { Point size = sizes[i]; if (Math.abs(size.y - targetHeight) < minDiff) { optimalSizeIndex = i; minDiff = Math.abs(size.y - targetHeight); } } } return optimalSizeIndex; } public static Size getOptimalVideoPreviewSize(Activity currentActivity, List sizes, android.util.Size targetSize) { Point[] points = new Point[sizes.size()]; int index = 0; for (Size s : sizes) { points[index++] = new Point(s.width, s.height); } int optimalPickIndex = getOptimalVideoPreviewSize(currentActivity, points, targetSize); return (optimalPickIndex == -1) ? null : sizes.get(optimalPickIndex); } public static int getOptimalVideoPreviewSize(Activity currentActivity, Point[] sizes, android.util.Size targetSize) { // Use a very small tolerance because we want an exact match. 
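    /*
     * Illustrative sketch (not from the original file) of how the photo preview
     * size is typically chosen with getOptimalPreviewSize() above: candidates are
     * filtered to the target aspect ratio (within 0.02), heights above 1440 are
     * skipped, and the remaining size whose height is closest to the display's
     * short edge wins. The variable names are assumptions for the example.
     *
     *   Camera.Parameters params = camera.getParameters();   // hypothetical open Camera
     *   List<Camera.Size> supported = params.getSupportedPreviewSizes();
     *   Camera.Size picture = params.getPictureSize();
     *   double targetRatio = (double) picture.width / picture.height;
     *   Camera.Size preview =
     *           CameraUtil.getOptimalPreviewSize(activity, supported, targetRatio);
     *   if (preview != null) {
     *       params.setPreviewSize(preview.width, preview.height);
     *   }
     */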
final double ASPECT_TOLERANCE = 0.01; double targetRatio = (double) targetSize.getWidth() / targetSize.getHeight(); if (sizes == null) return -1; int optimalSizeIndex = -1; double minDiff = Double.MAX_VALUE; // we want the video preview size is not bigger than 1080p // This point is for 1080p Point point = new Point(1920, 1080); int targetHeight = Math.min(point.x, point.y); // we want the video preview size is not bigger than video size int videoMiniHeight = Math.min(targetSize.getWidth(), targetSize.getHeight()); // Try to find an size match aspect ratio and size for (int i = 0; i < sizes.length; i++) { Point size = sizes[i]; double ratio = (double) size.x / size.y; if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue; double heightDiff = Math.abs(size.y - targetHeight); if (heightDiff < minDiff && size.y <= videoMiniHeight) { optimalSizeIndex = i; minDiff = Math.abs(size.y - targetHeight); } else if (heightDiff == minDiff) { // Prefer resolutions smaller-than-display when an equally close // larger-than-display resolution is available if (size.y < targetHeight && size.y <= videoMiniHeight) { optimalSizeIndex = i; minDiff = heightDiff; } } } // Cannot find the one match the aspect ratio. This should not happen. // Ignore the requirement. if (optimalSizeIndex == -1) { Log.w(TAG, "No preview size match the aspect ratio"); minDiff = Double.MAX_VALUE; for (int i = 0; i < sizes.length; i++) { Point size = sizes[i]; if (Math.abs(size.y - targetHeight) < minDiff) { optimalSizeIndex = i; minDiff = Math.abs(size.y - targetHeight); } } } return optimalSizeIndex; } // Returns the largest picture size which matches the given aspect ratio. public static Size getOptimalVideoSnapshotPictureSize( List sizes, double targetRatio) { // Use a very small tolerance because we want an exact match. final double ASPECT_TOLERANCE = 0.001; if (sizes == null) return null; Size optimalSize = null; // Try to find a size matches aspect ratio and has the largest width for (Size size : sizes) { double ratio = (double) size.width / size.height; if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue; if (optimalSize == null || size.width > optimalSize.width) { optimalSize = size; } } // Cannot find one that matches the aspect ratio. This should not happen. // Ignore the requirement. if (optimalSize == null) { Log.w(TAG, "No picture size match the aspect ratio"); for (Size size : sizes) { if (optimalSize == null || size.width > optimalSize.width) { optimalSize = size; } } } return optimalSize; } // Returns the largest thumbnail size which matches the given aspect ratio. public static Size getOptimalJpegThumbnailSize( List sizes, double targetRatio) { // Use a very small tolerance because we want an exact match. final double ASPECT_TOLERANCE = 0.001; if (sizes == null) return null; Size optimalSize = null; // Try to find a size matches aspect ratio and has the largest width for (Size size : sizes) { double ratio = (double) size.width / size.height; if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue; if (optimalSize == null || size.width > optimalSize.width) { optimalSize = size; } } // Cannot find one that matches the aspect ratio. This should not happen. // Ignore the requirement. 
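        /*
         * Illustrative sketch (not part of the original code): the two
         * "largest size matching the aspect ratio" helpers above are usually fed
         * straight from Camera.Parameters; the variable names are assumptions.
         *
         *   double ratio = (double) profile.videoFrameWidth / profile.videoFrameHeight;
         *   Camera.Size snapshot = CameraUtil.getOptimalVideoSnapshotPictureSize(
         *           params.getSupportedPictureSizes(), ratio);
         *   if (snapshot != null) {
         *       params.setPictureSize(snapshot.width, snapshot.height);
         *   }
         */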
if (optimalSize == null) { Log.w(TAG, "No thumbnail size match the aspect ratio"); for (Size size : sizes) { if (optimalSize == null || size.width > optimalSize.width) { optimalSize = size; } } } return optimalSize; } public static void dumpParameters(Parameters params) { Set sortedParams = new TreeSet(); sortedParams.addAll(Arrays.asList(params.flatten().split(";"))); StringBuilder sb = new StringBuilder(); sb.append("["); Iterator i = sortedParams.iterator(); while (i.hasNext()) { String nextParam = i.next(); if ((sb.length() + nextParam.length()) > 2044) { Log.d(TAG, "Parameters: " + sb.toString()); sb = new StringBuilder(); } sb.append(nextParam); if (i.hasNext()) { sb.append(", "); } } sb.append("]"); Log.d(TAG, "Parameters: " + sb.toString()); } /** * Returns whether the device is voice-capable (meaning, it can do MMS). */ public static boolean isMmsCapable(Context context) { TelephonyManager telephonyManager = (TelephonyManager) context.getSystemService(Context.TELEPHONY_SERVICE); if (telephonyManager == null) { return false; } try { Class partypes[] = new Class[0]; Method sIsVoiceCapable = TelephonyManager.class.getMethod( "isVoiceCapable", partypes); Object arglist[] = new Object[0]; Object retobj = sIsVoiceCapable.invoke(telephonyManager, arglist); return (Boolean) retobj; } catch (java.lang.reflect.InvocationTargetException ite) { // Failure, must be another device. // Assume that it is voice capable. } catch (IllegalAccessException iae) { // Failure, must be an other device. // Assume that it is voice capable. } catch (NoSuchMethodException nsme) { } return true; } // This is for test only. Allow the camera to launch the specific camera. public static int getCameraFacingIntentExtras(Activity currentActivity) { int cameraId = -1; int intentCameraId = currentActivity.getIntent().getIntExtra(CameraUtil.EXTRAS_CAMERA_FACING, -1); if (isFrontCameraIntent(intentCameraId)) { // Check if the front camera exist int frontCameraId = CameraHolder.instance().getFrontCameraId(); if (frontCameraId != -1) { cameraId = frontCameraId; } } else if (isBackCameraIntent(intentCameraId)) { // Check if the back camera exist int backCameraId = CameraHolder.instance().getBackCameraId(); if (backCameraId != -1) { cameraId = backCameraId; } } return cameraId; } private static boolean isFrontCameraIntent(int intentCameraId) { return (intentCameraId == android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT); } private static boolean isBackCameraIntent(int intentCameraId) { return (intentCameraId == android.hardware.Camera.CameraInfo.CAMERA_FACING_BACK); } private static int sLocation[] = new int[2]; // This method is not thread-safe. public static boolean pointInView(float x, float y, View v) { v.getLocationInWindow(sLocation); return x >= sLocation[0] && x < (sLocation[0] + v.getWidth()) && y >= sLocation[1] && y < (sLocation[1] + v.getHeight()); } public static int[] getRelativeLocation(View reference, View view) { reference.getLocationInWindow(sLocation); int referenceX = sLocation[0]; int referenceY = sLocation[1]; view.getLocationInWindow(sLocation); sLocation[0] -= referenceX; sLocation[1] -= referenceY; return sLocation; } public static boolean isUriValid(Uri uri, ContentResolver resolver) { if (uri == null) return false; try { ParcelFileDescriptor pfd = resolver.openFileDescriptor(uri, "r"); if (pfd == null) { Log.e(TAG, "Fail to open URI. 
URI=" + uri); return false; } pfd.close(); } catch (IOException ex) { return false; } return true; } public static void dumpRect(RectF rect, String msg) { Log.v(TAG, msg + "=(" + rect.left + "," + rect.top + "," + rect.right + "," + rect.bottom + ")"); } public static void inlineRectToRectF(RectF rectF, Rect rect) { rect.left = Math.round(rectF.left); rect.top = Math.round(rectF.top); rect.right = Math.round(rectF.right); rect.bottom = Math.round(rectF.bottom); } public static Rect rectFToRect(RectF rectF) { Rect rect = new Rect(); inlineRectToRectF(rectF, rect); return rect; } public static RectF rectToRectF(Rect r) { return new RectF(r.left, r.top, r.right, r.bottom); } public static void prepareMatrix(Matrix matrix, boolean mirror, int displayOrientation, int viewWidth, int viewHeight) { // Need mirror for front camera. matrix.setScale(mirror ? -1 : 1, 1); // This is the value for android.hardware.Camera.setDisplayOrientation. matrix.postRotate(displayOrientation); // Camera driver coordinates range from (-1000, -1000) to (1000, 1000). // UI coordinates range from (0, 0) to (width, height). matrix.postScale(viewWidth / 2000f, viewHeight / 2000f); matrix.postTranslate(viewWidth / 2f, viewHeight / 2f); } public static String createJpegName(long dateTaken, boolean refocus) { synchronized (sImageFileNamer) { return sImageFileNamer.generateName(dateTaken, refocus); } } public static String createJpegName(long dateTaken) { synchronized (sImageFileNamer) { return sImageFileNamer.generateName(dateTaken, false); } } public static void broadcastNewPicture(Context context, Uri uri) { context.sendBroadcast(new Intent(ACTION_NEW_PICTURE, uri)); // Keep compatibility context.sendBroadcast(new Intent("com.android.camera.NEW_PICTURE", uri)); } public static void fadeIn(View view, float startAlpha, float endAlpha, long duration) { if (view.getVisibility() == View.VISIBLE) return; view.setVisibility(View.VISIBLE); Animation animation = new AlphaAnimation(startAlpha, endAlpha); animation.setDuration(duration); view.startAnimation(animation); } public static void fadeIn(View view) { fadeIn(view, 0F, 1F, 400); // We disabled the button in fadeOut(), so enable it here. view.setEnabled(true); } public static void fadeOut(View view) { if (view.getVisibility() != View.VISIBLE) return; // Since the button is still clickable before fade-out animation // ends, we disable the button first to block click. 
view.setEnabled(false); Animation animation = new AlphaAnimation(1F, 0F); animation.setDuration(400); view.startAnimation(animation); view.setVisibility(View.GONE); } public static Rect getFinalCropRect(Rect rect, float targetRatio) { Rect finalRect = new Rect(rect); float rectRatio = (float) rect.width()/(float) rect.height(); // if ratios are different, adjust crop rect to fit ratio // if ratios are same, no need to adjust crop Log.d(TAG, "getFinalCropRect - rect: " + rect.toString()); Log.d(TAG, "getFinalCropRect - ratios: " + rectRatio + ", " + targetRatio); if(rectRatio > targetRatio) { // ratio indicates need for horizontal crop // add .5 to round up if necessary int newWidth = (int)(((float)rect.height() * targetRatio) + .5f); int newXoffset = (rect.width() - newWidth)/2 + rect.left; finalRect.left = newXoffset; finalRect.right = newXoffset + newWidth; } else if(rectRatio < targetRatio) { // ratio indicates need for vertical crop // add .5 to round up if necessary int newHeight = (int)(((float)rect.width() / targetRatio) + .5f); int newYoffset = (rect.height() - newHeight)/2 + rect.top; finalRect.top = newYoffset; finalRect.bottom = newYoffset + newHeight; } Log.d(TAG, "getFinalCropRect - final rect: " + finalRect.toString()); return finalRect; } public static int getJpegRotation(int cameraId, int orientation) { // See android.hardware.Camera.Parameters.setRotation for // documentation. int rotation = 0; if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN) { orientation = 0; } CameraCharacteristics info = CameraHolder.instance().getCameraCharacteristics(cameraId); if (info.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) { rotation = (info.get(CameraCharacteristics.SENSOR_ORIENTATION) - orientation + 360) % 360; } else { // back-facing camera rotation = (info.get(CameraCharacteristics.SENSOR_ORIENTATION) + orientation) % 360; } return rotation; } /** * Down-samples a jpeg byte array. * @param data a byte array of jpeg data * @param downSampleFactor down-sample factor * @return decoded and down-sampled bitmap */ public static Bitmap downSample(final byte[] data, int downSampleFactor) { final BitmapFactory.Options opts = new BitmapFactory.Options(); // Downsample the image opts.inSampleSize = downSampleFactor; return BitmapFactory.decodeByteArray(data, 0, data.length, opts); } public static void setGpsParameters(Parameters parameters, Location loc) { // Clear previous GPS location from the parameters. parameters.removeGpsData(); // We always encode GpsTimeStamp parameters.setGpsTimestamp(System.currentTimeMillis() / 1000); // Set GPS location. if (loc != null) { double lat = loc.getLatitude(); double lon = loc.getLongitude(); boolean hasLatLon = (lat != 0.0d) || (lon != 0.0d); if (hasLatLon) { Log.d(TAG, "Set gps location"); parameters.setGpsLatitude(lat); parameters.setGpsLongitude(lon); parameters.setGpsProcessingMethod(loc.getProvider().toUpperCase()); if (loc.hasAltitude()) { parameters.setGpsAltitude(loc.getAltitude()); } else { // for NETWORK_PROVIDER location provider, we may have // no altitude information, but the driver needs it, so // we fake one. parameters.setGpsAltitude(0); } if (loc.getTime() != 0) { // Location.getTime() is UTC in milliseconds. // gps-timestamp is UTC in seconds. 
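                /*
                 * Worked example (illustrative, not part of the original code) for
                 * getFinalCropRect() above: a 4000x3000 (4:3) rect cropped to 16:9
                 * keeps its width, and its height becomes
                 * (int) (4000 / (16 / 9f) + 0.5) = 2250, centered vertically.
                 *
                 *   Rect source = new Rect(0, 0, 4000, 3000);
                 *   Rect cropped = CameraUtil.getFinalCropRect(source, 16f / 9f);
                 *   // cropped == Rect(0, 375, 4000, 2625)
                 */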
long utcTimeSeconds = loc.getTime() / 1000; parameters.setGpsTimestamp(utcTimeSeconds); } } else { loc = null; } } } public static boolean isLowLuminance(Parameters parameters) { String lC = parameters.get(CameraSettings.KEY_LUMINANCE_CONDITION); if (lC != null) { if (lC.equals(CameraSettings.LUMINANCE_CONDITION_LOW)) { Log.d(TAG, "Parameter " + CameraSettings.KEY_LUMINANCE_CONDITION + "=" + CameraSettings.LUMINANCE_CONDITION_LOW); return true; } } return false; } public static String getFilpModeString(int value){ switch(value){ case 0: return CameraSettings.FLIP_MODE_OFF; case 1: return CameraSettings.FLIP_MODE_H; case 2: return CameraSettings.FLIP_MODE_V; case 3: return CameraSettings.FLIP_MODE_VH; default: return null; } } /** * For still image capture, we need to get the right fps range such that the * camera can slow down the framerate to allow for less-noisy/dark * viewfinder output in dark conditions. * * @param params Camera's parameters. * @return null if no appropiate fps range can't be found. Otherwise, return * the right range. */ public static int[] getPhotoPreviewFpsRange(Parameters params) { return getPhotoPreviewFpsRange(params.getSupportedPreviewFpsRange()); } public static int[] getPhotoPreviewFpsRange(List frameRates) { if (frameRates.size() == 0) { Log.e(TAG, "No suppoted frame rates returned!"); return null; } // Find the lowest min rate in supported ranges who can cover 30fps. int lowestMinRate = MAX_PREVIEW_FPS_TIMES_1000; for (int[] rate : frameRates) { int minFps = rate[Parameters.PREVIEW_FPS_MIN_INDEX]; int maxFps = rate[Parameters.PREVIEW_FPS_MAX_INDEX]; if (maxFps >= PREFERRED_PREVIEW_FPS_TIMES_1000 && minFps <= PREFERRED_PREVIEW_FPS_TIMES_1000 && minFps < lowestMinRate) { lowestMinRate = minFps; } } // Find all the modes with the lowest min rate found above, the pick the // one with highest max rate. int resultIndex = -1; int highestMaxRate = 0; for (int i = 0; i < frameRates.size(); i++) { int[] rate = frameRates.get(i); int minFps = rate[Parameters.PREVIEW_FPS_MIN_INDEX]; int maxFps = rate[Parameters.PREVIEW_FPS_MAX_INDEX]; if (minFps == lowestMinRate && highestMaxRate < maxFps) { highestMaxRate = maxFps; resultIndex = i; } } if (resultIndex >= 0) { return frameRates.get(resultIndex); } Log.e(TAG, "Can't find an appropiate frame rate range!"); return null; } public static int[] getMaxPreviewFpsRange(Parameters params) { List frameRates = params.getSupportedPreviewFpsRange(); if (frameRates != null && frameRates.size() > 0) { // The list is sorted. Return the last element. return frameRates.get(frameRates.size() - 1); } return new int[0]; } public static int getMaxPreviewFps(Parameters params) { List frameRates = params.getSupportedPreviewFrameRates(); if (frameRates != null && frameRates.size() > 0) { // The list is sorted. Return the last element. return frameRates.get(frameRates.size() - 1).intValue(); } return -1; } private static class ImageFileNamer { private final SimpleDateFormat mFormat; private final int REFOCUS_DEPTHMAP_IDX = 5; private final String REFOCUS_DEPTHMAP_SUFFIX = "DepthMap"; private final int REFOCUS_ALLFOCUS_IDX = 6; private final String REFOCUS_ALLFOCUS_SUFFIX = "Allfocus"; private int mRefocusIdx = 0; // The date (in milliseconds) used to generate the last name. private long mLastDate; // Number of names generated for the same second. 
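        /*
         * Illustrative note (not from the original file): generateName() below
         * appends _1, _2, ... when several names are generated within the same
         * wall-clock second, so back-to-back captures do not collide. Assuming an
         * "IMG"_yyyyMMdd_HHmmss-style format string:
         *
         *   // first call in that second  -> "IMG_20240101_120000"
         *   // second call, same second   -> "IMG_20240101_120000_1"
         */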
private int mSameSecondCount; public ImageFileNamer(String format) { mFormat = new SimpleDateFormat(format); } public String generateName(long dateTaken, boolean refocus) { Date date = new Date(dateTaken); String result = mFormat.format(date); if (refocus) { if (mRefocusIdx == REFOCUS_DEPTHMAP_IDX) { result += "_" + REFOCUS_DEPTHMAP_SUFFIX; mRefocusIdx++; } else if (mRefocusIdx == REFOCUS_ALLFOCUS_IDX) { result += "_" + REFOCUS_ALLFOCUS_SUFFIX; mRefocusIdx = 0; } else { result += "_" + mRefocusIdx; mRefocusIdx++; } } else { // If the last name was generated for the same second, // we append _1, _2, etc to the name. if (dateTaken / 1000 == mLastDate / 1000) { mSameSecondCount++; result += "_" + mSameSecondCount; } else { mLastDate = dateTaken; mSameSecondCount = 0; } } return result; } } public static void playVideo(Activity activity, Uri uri, String title) { try { boolean isSecureCamera = ((CameraActivity)activity).isSecureCamera(); UsageStatistics.onEvent(UsageStatistics.COMPONENT_CAMERA, UsageStatistics.ACTION_PLAY_VIDEO, null); if (!isSecureCamera) { Intent intent = IntentHelper.getVideoPlayerIntent(activity, uri) .putExtra(Intent.EXTRA_TITLE, title) .putExtra(KEY_TREAT_UP_AS_BACK, true); activity.startActivityForResult(intent, CameraActivity.REQ_CODE_DONT_SWITCH_TO_PREVIEW); } else { // In order not to send out any intent to be intercepted and // show the lock screen immediately, we just let the secure // camera activity finish. activity.finish(); } } catch (ActivityNotFoundException e) { RotateTextToast.makeText(activity, activity.getString(R.string.video_err), Toast.LENGTH_SHORT).show(); } } /** * Starts GMM with the given location shown. If this fails, and GMM could * not be found, we use a geo intent as a fallback. * * @param activity the activity to use for launching the Maps intent. * @param latLong a 2-element array containing {latitude/longitude}. */ public static void showOnMap(Activity activity, double[] latLong) { try { // We don't use "geo:latitude,longitude" because it only centers // the MapView to the specified location, but we need a marker // for further operations (routing to/from). // The q=(lat, lng) syntax is suggested by geo-team. String uri = String.format(Locale.ENGLISH, "http://maps.google.com/maps?f=q&q=(%f,%f)", latLong[0], latLong[1]); ComponentName compName = new ComponentName(MAPS_PACKAGE_NAME, MAPS_CLASS_NAME); Intent mapsIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(uri)).setComponent(compName); activity.startActivityForResult(mapsIntent, CameraActivity.REQ_CODE_DONT_SWITCH_TO_PREVIEW); } catch (ActivityNotFoundException e) { // Use the "geo intent" if no GMM is installed Log.e(TAG, "GMM activity not found!", e); String url = String.format(Locale.ENGLISH, "geo:%f,%f", latLong[0], latLong[1]); try { Intent mapsIntent = new Intent(Intent.ACTION_VIEW, Uri.parse(url)); activity.startActivity(mapsIntent); } catch (ActivityNotFoundException ex) { Log.e(TAG, "Map view activity not found!", ex); RotateTextToast.makeText(activity, activity.getString(R.string.map_activity_not_found_err), Toast.LENGTH_SHORT).show(); } } } /** * Dumps the stack trace. * * @param level How many levels of the stack are dumped. 0 means all. * @return A {@link java.lang.String} of all the output with newline * between each. */ public static String dumpStackTrace(int level) { StackTraceElement[] elems = Thread.currentThread().getStackTrace(); // Ignore the first 3 elements. level = (level == 0 ? 
elems.length : Math.min(level + 3, elems.length)); String ret = new String(); for (int i = 3; i < level; i++) { ret = ret + "\t" + elems[i].toString() + '\n'; } return ret; } public static int determineRatio(int width, int height) { if (height != 0) { return determineRatio(((float) width) / height); } return RATIO_UNKNOWN; } public static int determineRatio(float ratio) { if (ratio < 1) { ratio = 1 / ratio; } if (ratio > 1.33f && ratio < 1.34f) { return RATIO_4_3; } else if (ratio > 1.77f && ratio < 1.78f) { return RATIO_16_9; } else if (ratio > 1.49f && ratio < 1.51f) { return RATIO_3_2; } else { return RATIO_UNKNOWN; } } public static int determinCloseRatio(float ratio) { int retRatio = RATIO_UNKNOWN; if (ratio != 1.0) { if (ratio < 1) { ratio = 1 / ratio; } float diffFrom_4_3 = ((float) 4 / 3) / ratio; if (diffFrom_4_3 < 1) { diffFrom_4_3 = 1 / diffFrom_4_3; } float diffFrom_16_9 = ((float) 16 / 9) / ratio; if (diffFrom_16_9 < 1) { diffFrom_16_9 = 1 / diffFrom_16_9; } float diffFrom_3_2 = ((float) 3 / 2) / ratio; if (diffFrom_3_2 < 1) { diffFrom_3_2 = 1 / diffFrom_3_2; } float minDiffRatio = diffFrom_3_2; if (diffFrom_3_2 < diffFrom_4_3) { retRatio = RATIO_3_2; minDiffRatio = diffFrom_3_2; } else { retRatio = RATIO_4_3; minDiffRatio = diffFrom_4_3; } if (minDiffRatio > diffFrom_16_9) { retRatio = RATIO_16_9; } } return retRatio; } public static String millisecondToTimeString(long milliSeconds, boolean displayCentiSeconds) { long seconds = milliSeconds / 1000; // round down to compute seconds long minutes = seconds / 60; long hours = minutes / 60; long remainderMinutes = minutes - (hours * 60); long remainderSeconds = seconds - (minutes * 60); StringBuilder timeStringBuilder = new StringBuilder(); // Hours if (hours > 0) { if (hours < 10) { timeStringBuilder.append('0'); } timeStringBuilder.append(hours); timeStringBuilder.append(':'); } // Minutes if (remainderMinutes < 10) { timeStringBuilder.append('0'); } timeStringBuilder.append(remainderMinutes); timeStringBuilder.append(':'); // Seconds if (remainderSeconds < 10) { timeStringBuilder.append('0'); } timeStringBuilder.append(remainderSeconds); // Centi seconds if (displayCentiSeconds) { timeStringBuilder.append('.'); long remainderCentiSeconds = (milliSeconds - seconds * 1000) / 10; if (remainderCentiSeconds < 10) { timeStringBuilder.append('0'); } timeStringBuilder.append(remainderCentiSeconds); } return timeStringBuilder.toString(); } public static String convertOutputFormatToMimeType(int outputFileFormat) { if (outputFileFormat == MediaRecorder.OutputFormat.MPEG_4) { return "video/mp4"; } return "video/3gpp"; } public static String convertOutputFormatToFileExt(int outputFileFormat) { if (outputFileFormat == MediaRecorder.OutputFormat.MPEG_4) { return ".mp4"; } return ".3gp"; } /** * Compares two {@code Size}s based on their areas. 
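 *
 * <p>Illustrative usage (not part of the original file); the size list is a
 * made-up example and would normally come from a stream configuration map:
 * <pre>{@code
 * List<android.util.Size> choices = Arrays.asList(
 *         new android.util.Size(1920, 1080),
 *         new android.util.Size(1280, 720),
 *         new android.util.Size(3840, 2160));
 * android.util.Size largest =
 *         Collections.max(choices, new CameraUtil.CompareSizesByArea());
 * // largest == 3840x2160; Collections.min(...) picks 1280x720.
 * }</pre>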
*/ public static class CompareSizesByArea implements Comparator { @Override public int compare(android.util.Size lhs, android.util.Size rhs) { // We cast here to ensure the multiplications won't overflow return Long.signum((long) lhs.getWidth() * lhs.getHeight() - (long) rhs.getWidth() * rhs.getHeight()); } } public static List createHighSpeedRequestList(final CaptureRequest request) throws CameraAccessException { if (request == null) { throw new IllegalArgumentException("Input capture request must not be null"); } Set physicalCameraIdSet = null; Collection outputSurfaces = request.getTargets(); Range fpsRange = request.get(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE); try { StreamConfigurationMap config = SettingsManager.getInstance().getStreamConfigurationMap((int)request.getTag()); SurfaceUtils.checkConstrainedHighSpeedSurfaces(outputSurfaces, fpsRange, config); } catch (IllegalArgumentException e) { Log.w(TAG, " checkConstrainedHighSpeedSurfaces occur " + e.toString()); } // Request list size: to limit the preview to 30fps, need use maxFps/30; to maximize // the preview frame rate, should use maxBatch size for that high speed stream // configuration. We choose the former for now. int requestListSize = getHighSpeedVideoConfigsLists((int)request.getTag()); if (requestListSize == -1) { requestListSize = fpsRange.getUpper() / 30; } List requestList = new ArrayList(); // Prepare the Request builders: need carry over the request controls. // First, create a request builder that will only include preview or recording target. CameraMetadataNative requestMetadata = new CameraMetadataNative(request.getNativeCopy()); // Note that after this step, the requestMetadata is mutated (swapped) and can not be used // for next request builder creation. CaptureRequest.Builder singleTargetRequestBuilder = constructorCaptureRequestBuilder( requestMetadata, /*reprocess*/false, CameraCaptureSession.SESSION_ID_NONE, request, physicalCameraIdSet); singleTargetRequestBuilder.setTag(request.getTag()); // Overwrite the capture intent to make sure a good value is set. Iterator iterator = outputSurfaces.iterator(); Surface firstSurface = iterator.next(); Surface secondSurface = null; if (outputSurfaces.size() == 1 && SurfaceUtils.isSurfaceForHwVideoEncoder(firstSurface)) { singleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW); } else { // Video only, or preview + video singleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_VIDEO_RECORD); } singleTargetRequestBuilder.setPartOfCHSRequestList(/*partOfCHSList*/true); // Second, Create a request builder that will include both preview and recording targets. CaptureRequest.Builder doubleTargetRequestBuilder = null; if (outputSurfaces.size() == 2) { // Have to create a new copy, the original one was mutated after a new // CaptureRequest.Builder creation. 
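            /*
             * Illustrative note (not part of the original code): the request list
             * built by this method batches one burst per preview frame. For a
             * hypothetical 120 fps constrained high-speed configuration with no
             * entry in the high-speed config table, requestListSize falls back to
             * fpsRange.getUpper() / 30, so each burst holds four requests: when
             * both a preview and a recording surface are attached, the first
             * targets preview + recording and the rest target recording only.
             *
             *   Range<Integer> fps = new Range<>(120, 120);   // hypothetical HFR range
             *   int burst = fps.getUpper() / 30;              // == 4 requests per burst
             */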
requestMetadata = new CameraMetadataNative(request.getNativeCopy()); doubleTargetRequestBuilder = constructorCaptureRequestBuilder(requestMetadata, /*reprocess*/false, CameraCaptureSession.SESSION_ID_NONE, request, physicalCameraIdSet); doubleTargetRequestBuilder.setTag(request.getTag()); doubleTargetRequestBuilder.set(CaptureRequest.CONTROL_CAPTURE_INTENT, CaptureRequest.CONTROL_CAPTURE_INTENT_VIDEO_RECORD); doubleTargetRequestBuilder.addTarget(firstSurface); secondSurface = iterator.next(); doubleTargetRequestBuilder.addTarget(secondSurface); doubleTargetRequestBuilder.setPartOfCHSRequestList(/*partOfCHSList*/true); // Make sure singleTargetRequestBuilder contains only recording surface for // preview + recording case. Surface recordingSurface = firstSurface; if (!SurfaceUtils.isSurfaceForHwVideoEncoder(recordingSurface)) { recordingSurface = secondSurface; } singleTargetRequestBuilder.addTarget(recordingSurface); } else { // Single output case: either recording or preview. singleTargetRequestBuilder.addTarget(firstSurface); } // Generate the final request list. for (int i = 0; i < requestListSize; i++) { if (i == 0 && doubleTargetRequestBuilder != null) { // First request should be recording + preview request requestList.add(doubleTargetRequestBuilder.build()); } else { requestList.add(singleTargetRequestBuilder.build()); } } return Collections.unmodifiableList(requestList); } private static int getHighSpeedVideoConfigsLists(int cameraId) { int optimalSizeIndex = -1; SettingsManager settingsManager = SettingsManager.getInstance(); int[] table = settingsManager.getHighSpeedVideoConfigs(cameraId); if (table == null) { Log.w(TAG, " getHighSpeedVideoConfigsLists is null"); return optimalSizeIndex; } String videoSizeString = settingsManager.getValue(SettingsManager.KEY_VIDEO_QUALITY); if (videoSizeString == null) { Log.w(TAG, " KEY_VIDEO_QUALITY is null"); return optimalSizeIndex; } android.util.Size videoSize = parsePictureSize(videoSizeString); String rateValue = settingsManager.getValue(SettingsManager.KEY_VIDEO_HIGH_FRAME_RATE); if (rateValue == null || rateValue.substring(0, 3).equals("off")) { Log.w(TAG, " KEY_VIDEO_HIGH_FRAME_RATE is null"); return optimalSizeIndex; } int frameRate = Integer.parseInt(rateValue.substring(3)); for (int i = 0; i < table.length; i += 5) { if (table[i] == videoSize.getWidth() && table[i + 1] == videoSize.getHeight() && (table[i + 2] == frameRate || table[i + 3] == frameRate)) { if (i != table.length) { optimalSizeIndex = table[i + 4]; return optimalSizeIndex; } } } return optimalSizeIndex; } private static android.util.Size parsePictureSize(String value) { int indexX = value.indexOf('x'); int width = Integer.parseInt(value.substring(0, indexX)); int height = Integer.parseInt(value.substring(indexX + 1)); return new android.util.Size(width, height); } private static CaptureRequest.Builder constructorCaptureRequestBuilder ( CameraMetadataNative requestMetadata, boolean reprocess, int SESSION_ID_NONE, CaptureRequest request, Set physicalCameraIdSet) { CaptureRequest.Builder builder = null; try { Class clazz = Class.forName("android.hardware.camera2.CaptureRequest$Builder"); // for Android O, has 3 parameters builder = (CaptureRequest.Builder) clazz.getConstructors()[0].newInstance( requestMetadata, reprocess, SESSION_ID_NONE); } catch (ClassNotFoundException e) { Log.v(TAG, "constructorCaptureRequestBuilder for AndroidO ClassNotFoundException " + e.toString()); } catch (Exception e) { Log.v(TAG, "constructorCaptureRequestBuilder for AndroidO Exception " + 
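            /*
             * Illustrative note (not from the original file) on the parsing helpers
             * above: video sizes are stored as "WIDTHxHEIGHT" strings and the
             * high-frame-rate setting as a three-character prefix followed by the
             * rate, which is why getHighSpeedVideoConfigsLists() checks
             * substring(0, 3) against "off" and parses substring(3).
             *
             *   android.util.Size s = parsePictureSize("1920x1080");       // 1920 x 1080
             *   String rateValue = "hfr120";                               // hypothetical value
             *   int frameRate = Integer.parseInt(rateValue.substring(3));  // == 120
             */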
                    e.toString());
        }
        if (builder == null) {
            // For Android P, the constructor takes 5 parameters.
            String logicalCameraId = "-1";
            try {
                Method getLogicalCameraId =
                        Class.forName("android.hardware.camera2.CaptureRequest")
                                .getMethod("getLogicalCameraId");
                logicalCameraId = (String) getLogicalCameraId.invoke(request);
            } catch (NoSuchMethodException e) {
                Log.v(TAG, "constructorCaptureRequestBuilder NoSuchMethodException "
                        + e.toString());
            } catch (Exception e) {
                Log.v(TAG, "constructorCaptureRequestBuilder logicalCameraId Exception "
                        + e.toString());
            }
            try {
                Class clazz = Class.forName("android.hardware.camera2.CaptureRequest$Builder");
                Log.v(TAG, "logicalCameraId :" + logicalCameraId);
                builder = (CaptureRequest.Builder) clazz.getConstructors()[0].newInstance(
                        requestMetadata, reprocess, SESSION_ID_NONE, logicalCameraId,
                        physicalCameraIdSet);
            } catch (ClassNotFoundException e) {
                Log.v(TAG, "constructorCaptureRequestBuilder ClassNotFoundException "
                        + e.toString());
            } catch (Exception e) {
                Log.v(TAG, "constructorCaptureRequestBuilder Exception " + e.toString());
            }
        }
        return builder;
    }

    public static int dip2px(Context context, float dpValue) {
        final float scale = context.getResources().getDisplayMetrics().density;
        return (int) (dpValue * scale + 0.5f);
    }

    public static void saveDialogShowConfig(Context context, String key, boolean needRequest) {
        SharedPreferences sp = context.getSharedPreferences(DIALOG_CONFIG, MODE_PRIVATE);
        SharedPreferences.Editor editor = sp.edit();
        editor.putBoolean(key, needRequest);
        editor.apply();
    }

    public static boolean loadDialogShowConfig(Context context, String key) {
        SharedPreferences sp = context.getSharedPreferences(DIALOG_CONFIG, MODE_PRIVATE);
        return sp.getBoolean(key, true);
    }

    public static Bitmap adjustPhotoRotation(Bitmap bm, final int orientationDegree) {
        Matrix m = new Matrix();
        m.setRotate(orientationDegree, (float) bm.getWidth() / 2, (float) bm.getHeight() / 2);
        try {
            return Bitmap.createBitmap(bm, 0, 0, bm.getWidth(), bm.getHeight(), m, true);
        } catch (OutOfMemoryError ex) {
            ex.printStackTrace();
        }
        return null;
    }
}
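/*
 * Illustrative sketch (not part of the original file): the dialog-config helpers
 * above persist a per-key "should we show this dialog again?" flag in the
 * "dialog_config" SharedPreferences. A caller typically checks the flag before
 * prompting and clears it once the user opts out; the key below reuses
 * KEY_DELETE purely as an example.
 *
 *   if (CameraUtil.loadDialogShowConfig(context, CameraUtil.KEY_DELETE)) {
 *       // show the confirmation dialog, then remember the "don't ask again" choice
 *       CameraUtil.saveDialogShowConfig(context, CameraUtil.KEY_DELETE, false);
 *   }
 */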