Diffstat (limited to 'src/com/android/camera/support')
-rw-r--r--  src/com/android/camera/support/app/OrientationManager.java  152
-rw-r--r--  src/com/android/camera/support/app/StitchingChangeListener.java  11
-rw-r--r--  src/com/android/camera/support/common/ApiHelper.java  219
-rw-r--r--  src/com/android/camera/support/common/Utils.java  325
-rw-r--r--  src/com/android/camera/support/filtershow/crop/CropExtras.java  106
-rw-r--r--  src/com/android/camera/support/glrenderer/BasicTexture.java  197
-rw-r--r--  src/com/android/camera/support/glrenderer/BitmapTexture.java  38
-rw-r--r--  src/com/android/camera/support/glrenderer/ExtTexture.java  45
-rw-r--r--  src/com/android/camera/support/glrenderer/GLCanvas.java  201
-rw-r--r--  src/com/android/camera/support/glrenderer/GLId.java  17
-rw-r--r--  src/com/android/camera/support/glrenderer/GLPaint.java  26
-rw-r--r--  src/com/android/camera/support/glrenderer/NinePatchChunk.java  66
-rw-r--r--  src/com/android/camera/support/glrenderer/NinePatchTexture.java  408
-rw-r--r--  src/com/android/camera/support/glrenderer/RawTexture.java  58
-rw-r--r--  src/com/android/camera/support/glrenderer/ResourceTexture.java  37
-rw-r--r--  src/com/android/camera/support/glrenderer/Texture.java  27
-rw-r--r--  src/com/android/camera/support/glrenderer/UploadedTexture.java  283
-rw-r--r--  src/com/android/camera/support/ui/BitmapScreenNail.java  45
-rw-r--r--  src/com/android/camera/support/ui/OrientationSource.java  6
-rw-r--r--  src/com/android/camera/support/ui/ScreenNail.java  21
-rw-r--r--  src/com/android/camera/support/ui/SurfaceTextureScreenNail.java  128
-rw-r--r--  src/com/android/camera/support/util/AccessibilityUtils.java  38
-rw-r--r--  src/com/android/camera/support/util/MotionEventHelper.java  105
23 files changed, 2559 insertions(+), 0 deletions(-)
diff --git a/src/com/android/camera/support/app/OrientationManager.java b/src/com/android/camera/support/app/OrientationManager.java
new file mode 100644
index 000000000..850d46591
--- /dev/null
+++ b/src/com/android/camera/support/app/OrientationManager.java
@@ -0,0 +1,152 @@
+package com.android.camera.support.app;
+
+
+import android.app.Activity;
+import android.content.ContentResolver;
+import android.content.Context;
+import android.content.pm.ActivityInfo;
+import android.content.res.Configuration;
+import android.provider.Settings;
+import android.util.Log;
+import android.view.OrientationEventListener;
+import android.view.Surface;
+
+import com.android.camera.support.ui.OrientationSource;
+import com.android.camera.support.common.ApiHelper;
+
+public class OrientationManager implements OrientationSource {
+ private static final String TAG = "OrientationManager";
+
+ // Orientation hysteresis amount used in rounding, in degrees
+ private static final int ORIENTATION_HYSTERESIS = 5;
+
+ private Activity mActivity;
+ private MyOrientationEventListener mOrientationListener;
+ // If the framework orientation is locked.
+ private boolean mOrientationLocked = false;
+
+ // This is true if "Settings -> Display -> Rotation Lock" is checked. We
+ // don't allow the orientation to be unlocked if the value is true.
+ private boolean mRotationLockedSetting = false;
+
+ public OrientationManager(Activity activity) {
+ mActivity = activity;
+ mOrientationListener = new MyOrientationEventListener(activity);
+ }
+
+ public void resume() {
+ ContentResolver resolver = mActivity.getContentResolver();
+ mRotationLockedSetting = Settings.System.getInt(
+ resolver, Settings.System.ACCELEROMETER_ROTATION, 0) != 1;
+ mOrientationListener.enable();
+ }
+
+ public void pause() {
+ mOrientationListener.disable();
+ }
+
+ ////////////////////////////////////////////////////////////////////////////
+ // Orientation handling
+ //
+ // We can choose to lock the framework orientation or not. If we lock the
+    // framework orientation, we calculate a compensation value according to
+ // current device orientation and send it to listeners. If we don't lock
+ // the framework orientation, we always set the compensation value to 0.
+ ////////////////////////////////////////////////////////////////////////////
+
+ // Lock the framework orientation to the current device orientation
+ public void lockOrientation() {
+ if (mOrientationLocked) return;
+ mOrientationLocked = true;
+ if (ApiHelper.HAS_ORIENTATION_LOCK) {
+ mActivity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LOCKED);
+ } else {
+ mActivity.setRequestedOrientation(calculateCurrentScreenOrientation());
+ }
+ }
+
+ // Unlock the framework orientation, so it can change when the device
+ // rotates.
+ public void unlockOrientation() {
+ if (!mOrientationLocked) return;
+ mOrientationLocked = false;
+ Log.d(TAG, "unlock orientation");
+ mActivity.setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR);
+ }
+
+ private int calculateCurrentScreenOrientation() {
+ int displayRotation = getDisplayRotation();
+ // Display rotation >= 180 means we need to use the REVERSE landscape/portrait
+ boolean standard = displayRotation < 180;
+ if (mActivity.getResources().getConfiguration().orientation
+ == Configuration.ORIENTATION_LANDSCAPE) {
+ return standard
+ ? ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE
+ : ActivityInfo.SCREEN_ORIENTATION_REVERSE_LANDSCAPE;
+ } else {
+ if (displayRotation == 90 || displayRotation == 270) {
+                // If displayRotation is 90 or 270, the device's natural
+                // orientation is landscape. On such devices, portrait is a
+                // 90 degree clockwise rotation from landscape, but the display
+                // rotation is measured counterclockwise, so we need to flip
+                // which portrait orientation we pick.
+ standard = !standard;
+ }
+ return standard
+ ? ActivityInfo.SCREEN_ORIENTATION_PORTRAIT
+ : ActivityInfo.SCREEN_ORIENTATION_REVERSE_PORTRAIT;
+ }
+ }
+
+ // This listens to the device orientation, so we can update the compensation.
+ private class MyOrientationEventListener extends OrientationEventListener {
+ public MyOrientationEventListener(Context context) {
+ super(context);
+ }
+
+ @Override
+ public void onOrientationChanged(int orientation) {
+            // We keep the last known orientation, so if the user first
+            // orients the camera and then points it at the floor or the sky,
+            // we still have the correct orientation.
+ if (orientation == ORIENTATION_UNKNOWN) return;
+ orientation = roundOrientation(orientation, 0);
+ }
+ }
+
+ @Override
+ public int getDisplayRotation() {
+ return getDisplayRotation(mActivity);
+ }
+
+ @Override
+ public int getCompensation() {
+ return 0;
+ }
+
+ private static int roundOrientation(int orientation, int orientationHistory) {
+ boolean changeOrientation = false;
+ if (orientationHistory == OrientationEventListener.ORIENTATION_UNKNOWN) {
+ changeOrientation = true;
+ } else {
+ int dist = Math.abs(orientation - orientationHistory);
+ dist = Math.min(dist, 360 - dist);
+ changeOrientation = (dist >= 45 + ORIENTATION_HYSTERESIS);
+ }
+ if (changeOrientation) {
+ return ((orientation + 45) / 90 * 90) % 360;
+ }
+ return orientationHistory;
+ }
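+    // For example, roundOrientation(49, 0) keeps the history and returns 0
+    // (distance 49 < 45 + ORIENTATION_HYSTERESIS), while roundOrientation(50, 0)
+    // rounds to the nearest multiple of 90 and returns 90.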
+
+ private static int getDisplayRotation(Activity activity) {
+ int rotation = activity.getWindowManager().getDefaultDisplay()
+ .getRotation();
+ switch (rotation) {
+ case Surface.ROTATION_0: return 0;
+ case Surface.ROTATION_90: return 90;
+ case Surface.ROTATION_180: return 180;
+ case Surface.ROTATION_270: return 270;
+ }
+ return 0;
+ }
+} \ No newline at end of file
diff --git a/src/com/android/camera/support/app/StitchingChangeListener.java b/src/com/android/camera/support/app/StitchingChangeListener.java
new file mode 100644
index 000000000..f67fb3963
--- /dev/null
+++ b/src/com/android/camera/support/app/StitchingChangeListener.java
@@ -0,0 +1,11 @@
+package com.android.camera.support.app;
+
+import android.net.Uri;
+
+public interface StitchingChangeListener {
+ public void onStitchingQueued(Uri uri);
+
+ public void onStitchingResult(Uri uri);
+
+ public void onStitchingProgress(Uri uri, int progress);
+}
diff --git a/src/com/android/camera/support/common/ApiHelper.java b/src/com/android/camera/support/common/ApiHelper.java
new file mode 100644
index 000000000..e6c1f5fb6
--- /dev/null
+++ b/src/com/android/camera/support/common/ApiHelper.java
@@ -0,0 +1,219 @@
+package com.android.camera.support.common;
+
+
+import android.app.admin.DevicePolicyManager;
+import android.content.ComponentName;
+import android.hardware.Camera;
+import android.os.Build;
+import android.provider.MediaStore.MediaColumns;
+import android.view.View;
+import android.view.WindowManager;
+
+import java.lang.reflect.Field;
+
+public class ApiHelper {
+ public static interface VERSION_CODES {
+        // These values are copied from Build.VERSION_CODES
+ public static final int GINGERBREAD_MR1 = 10;
+ public static final int HONEYCOMB = 11;
+ public static final int HONEYCOMB_MR1 = 12;
+ public static final int HONEYCOMB_MR2 = 13;
+ public static final int ICE_CREAM_SANDWICH = 14;
+ public static final int ICE_CREAM_SANDWICH_MR1 = 15;
+ public static final int JELLY_BEAN = 16;
+ public static final int JELLY_BEAN_MR1 = 17;
+ public static final int JELLY_BEAN_MR2 = 18;
+ }
+
+ public static final boolean AT_LEAST_16 = Build.VERSION.SDK_INT >= 16;
+
+ public static final boolean USE_888_PIXEL_FORMAT =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN;
+
+ public static final boolean ENABLE_PHOTO_EDITOR =
+ Build.VERSION.SDK_INT >= VERSION_CODES.ICE_CREAM_SANDWICH;
+
+ public static final boolean HAS_VIEW_SYSTEM_UI_FLAG_LAYOUT_STABLE =
+ hasField(View.class, "SYSTEM_UI_FLAG_LAYOUT_STABLE");
+
+ public static final boolean HAS_VIEW_SYSTEM_UI_FLAG_HIDE_NAVIGATION =
+ hasField(View.class, "SYSTEM_UI_FLAG_HIDE_NAVIGATION");
+
+ public static final boolean HAS_MEDIA_COLUMNS_WIDTH_AND_HEIGHT =
+ hasField(MediaColumns.class, "WIDTH");
+
+ public static final boolean HAS_REUSING_BITMAP_IN_BITMAP_REGION_DECODER =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN;
+
+ public static final boolean HAS_REUSING_BITMAP_IN_BITMAP_FACTORY =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean HAS_SET_BEAM_PUSH_URIS =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN;
+
+ public static final boolean HAS_SET_DEFALT_BUFFER_SIZE = hasMethod(
+ "android.graphics.SurfaceTexture", "setDefaultBufferSize",
+ int.class, int.class);
+
+ public static final boolean HAS_RELEASE_SURFACE_TEXTURE = hasMethod(
+ "android.graphics.SurfaceTexture", "release");
+
+ public static final boolean HAS_SURFACE_TEXTURE =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean HAS_MTP =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB_MR1;
+
+ public static final boolean HAS_AUTO_FOCUS_MOVE_CALLBACK =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN;
+
+ public static final boolean HAS_REMOTE_VIEWS_SERVICE =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean HAS_INTENT_EXTRA_LOCAL_ONLY =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean HAS_SET_SYSTEM_UI_VISIBILITY =
+ hasMethod(View.class, "setSystemUiVisibility", int.class);
+
+ public static final boolean HAS_FACE_DETECTION;
+ static {
+ boolean hasFaceDetection = false;
+ try {
+ Class<?> listenerClass = Class.forName(
+ "android.hardware.Camera$FaceDetectionListener");
+ hasFaceDetection =
+ hasMethod(Camera.class, "setFaceDetectionListener", listenerClass) &&
+ hasMethod(Camera.class, "startFaceDetection") &&
+ hasMethod(Camera.class, "stopFaceDetection") &&
+ hasMethod(Camera.Parameters.class, "getMaxNumDetectedFaces");
+ } catch (Throwable t) {
+ }
+ HAS_FACE_DETECTION = hasFaceDetection;
+ }
+
+ public static final boolean HAS_GET_CAMERA_DISABLED =
+ hasMethod(DevicePolicyManager.class, "getCameraDisabled", ComponentName.class);
+
+ public static final boolean HAS_MEDIA_ACTION_SOUND =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN;
+
+ public static final boolean HAS_TIME_LAPSE_RECORDING =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean HAS_ZOOM_WHEN_RECORDING =
+ Build.VERSION.SDK_INT >= VERSION_CODES.ICE_CREAM_SANDWICH;
+
+ public static final boolean HAS_CAMERA_FOCUS_AREA =
+ Build.VERSION.SDK_INT >= VERSION_CODES.ICE_CREAM_SANDWICH;
+
+ public static final boolean HAS_CAMERA_METERING_AREA =
+ Build.VERSION.SDK_INT >= VERSION_CODES.ICE_CREAM_SANDWICH;
+
+ public static final boolean HAS_MOTION_EVENT_TRANSFORM =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean HAS_EFFECTS_RECORDING = false;
+
+ // "Background" filter does not have "context" input port in jelly bean.
+ public static final boolean HAS_EFFECTS_RECORDING_CONTEXT_INPUT =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR1;
+
+ public static final boolean HAS_GET_SUPPORTED_VIDEO_SIZE =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean HAS_SET_ICON_ATTRIBUTE =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean HAS_MEDIA_PROVIDER_FILES_TABLE =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean HAS_SURFACE_TEXTURE_RECORDING =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN;
+
+ public static final boolean HAS_ACTION_BAR =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ // Ex: View.setTranslationX.
+ public static final boolean HAS_VIEW_TRANSFORM_PROPERTIES =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean HAS_CAMERA_HDR =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR1;
+
+ public static final boolean HAS_OPTIONS_IN_MUTABLE =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean CAN_START_PREVIEW_IN_JPEG_CALLBACK =
+ Build.VERSION.SDK_INT >= VERSION_CODES.ICE_CREAM_SANDWICH;
+
+ public static final boolean HAS_VIEW_PROPERTY_ANIMATOR =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB_MR1;
+
+ public static final boolean HAS_POST_ON_ANIMATION =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN;
+
+ public static final boolean HAS_ANNOUNCE_FOR_ACCESSIBILITY =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN;
+
+ public static final boolean HAS_OBJECT_ANIMATION =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean HAS_GLES20_REQUIRED =
+ Build.VERSION.SDK_INT >= VERSION_CODES.HONEYCOMB;
+
+ public static final boolean HAS_ROTATION_ANIMATION =
+ hasField(WindowManager.LayoutParams.class, "rotationAnimation");
+
+ public static final boolean HAS_ORIENTATION_LOCK =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2;
+
+ public static final boolean HAS_CANCELLATION_SIGNAL =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN;
+
+ public static final boolean HAS_MEDIA_MUXER =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR2;
+
+ public static final boolean HAS_DISPLAY_LISTENER =
+ Build.VERSION.SDK_INT >= VERSION_CODES.JELLY_BEAN_MR1;
+
+ public static int getIntFieldIfExists(Class<?> klass, String fieldName,
+ Class<?> obj, int defaultVal) {
+ try {
+ Field f = klass.getDeclaredField(fieldName);
+ return f.getInt(obj);
+ } catch (Exception e) {
+ return defaultVal;
+ }
+ }
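+    // Illustrative use (hypothetical caller): getIntFieldIfExists(View.class,
+    // "SYSTEM_UI_FLAG_FULLSCREEN", null, 0) reads the static int constant via
+    // reflection when the platform declares it and returns 0 otherwise.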
+
+ private static boolean hasField(Class<?> klass, String fieldName) {
+ try {
+ klass.getDeclaredField(fieldName);
+ return true;
+ } catch (NoSuchFieldException e) {
+ return false;
+ }
+ }
+
+ private static boolean hasMethod(String className, String methodName,
+ Class<?>... parameterTypes) {
+ try {
+ Class<?> klass = Class.forName(className);
+ klass.getDeclaredMethod(methodName, parameterTypes);
+ return true;
+ } catch (Throwable th) {
+ return false;
+ }
+ }
+
+ private static boolean hasMethod(
+ Class<?> klass, String methodName, Class<?> ... paramTypes) {
+ try {
+ klass.getDeclaredMethod(methodName, paramTypes);
+ return true;
+ } catch (NoSuchMethodException e) {
+ return false;
+ }
+ }
+}
diff --git a/src/com/android/camera/support/common/Utils.java b/src/com/android/camera/support/common/Utils.java
new file mode 100644
index 000000000..f36d6adcd
--- /dev/null
+++ b/src/com/android/camera/support/common/Utils.java
@@ -0,0 +1,325 @@
+package com.android.camera.support.common;
+
+
+import android.content.Context;
+import android.content.pm.PackageInfo;
+import android.content.pm.PackageManager.NameNotFoundException;
+import android.database.Cursor;
+import android.os.Build;
+import android.os.ParcelFileDescriptor;
+import android.text.TextUtils;
+import android.util.Log;
+
+import java.io.Closeable;
+import java.io.IOException;
+import java.io.InterruptedIOException;
+
+public class Utils {
+ private static final String TAG = "Utils";
+ private static final String DEBUG_TAG = "GalleryDebug";
+
+ private static final long POLY64REV = 0x95AC9329AC4BC9B5L;
+ private static final long INITIALCRC = 0xFFFFFFFFFFFFFFFFL;
+
+ private static long[] sCrcTable = new long[256];
+
+ private static final boolean IS_DEBUG_BUILD =
+ Build.TYPE.equals("eng") || Build.TYPE.equals("userdebug");
+
+ private static final String MASK_STRING = "********************************";
+
+ // Throws AssertionError if the input is false.
+ public static void assertTrue(boolean cond) {
+ if (!cond) {
+ throw new AssertionError();
+ }
+ }
+
+ // Throws AssertionError with the message. We had a method having the form
+ // assertTrue(boolean cond, String message, Object ... args);
+ // However a call to that method will cause memory allocation even if the
+ // condition is false (due to autoboxing generated by "Object ... args"),
+ // so we don't use that anymore.
+ public static void fail(String message, Object ... args) {
+ throw new AssertionError(
+ args.length == 0 ? message : String.format(message, args));
+ }
+
+ // Throws NullPointerException if the input is null.
+ public static <T> T checkNotNull(T object) {
+ if (object == null) throw new NullPointerException();
+ return object;
+ }
+
+    // Returns true if the two input Objects are both null or equal
+ // to each other.
+ public static boolean equals(Object a, Object b) {
+ return (a == b) || (a == null ? false : a.equals(b));
+ }
+
+ // Returns the next power of two.
+    // Returns the input if it is already a power of 2.
+ // Throws IllegalArgumentException if the input is <= 0 or
+ // the answer overflows.
+ public static int nextPowerOf2(int n) {
+ if (n <= 0 || n > (1 << 30)) throw new IllegalArgumentException("n is invalid: " + n);
+ n -= 1;
+ n |= n >> 16;
+ n |= n >> 8;
+ n |= n >> 4;
+ n |= n >> 2;
+ n |= n >> 1;
+ return n + 1;
+ }
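+    // For example, nextPowerOf2(1) == 1, nextPowerOf2(33) == 64, and
+    // nextPowerOf2(4096) == 4096.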
+
+ // Returns the previous power of two.
+    // Returns the input if it is already a power of 2.
+ // Throws IllegalArgumentException if the input is <= 0
+ public static int prevPowerOf2(int n) {
+ if (n <= 0) throw new IllegalArgumentException();
+ return Integer.highestOneBit(n);
+ }
+
+ // Returns the input value x clamped to the range [min, max].
+ public static int clamp(int x, int min, int max) {
+ if (x > max) return max;
+ if (x < min) return min;
+ return x;
+ }
+
+ // Returns the input value x clamped to the range [min, max].
+ public static float clamp(float x, float min, float max) {
+ if (x > max) return max;
+ if (x < min) return min;
+ return x;
+ }
+
+ // Returns the input value x clamped to the range [min, max].
+ public static long clamp(long x, long min, long max) {
+ if (x > max) return max;
+ if (x < min) return min;
+ return x;
+ }
+
+ public static boolean isOpaque(int color) {
+ return color >>> 24 == 0xFF;
+ }
+
+ public static void swap(int[] array, int i, int j) {
+ int temp = array[i];
+ array[i] = array[j];
+ array[j] = temp;
+ }
+
+ /**
+     * Returns a 64-bit CRC for the given string.
+ *
+ * @param in input string
+ * @return a 64-bit crc value
+ */
+ public static final long crc64Long(String in) {
+ if (in == null || in.length() == 0) {
+ return 0;
+ }
+ return crc64Long(getBytes(in));
+ }
+
+ static {
+ // http://bioinf.cs.ucl.ac.uk/downloads/crc64/crc64.c
+ long part;
+ for (int i = 0; i < 256; i++) {
+ part = i;
+ for (int j = 0; j < 8; j++) {
+ long x = ((int) part & 1) != 0 ? POLY64REV : 0;
+ part = (part >> 1) ^ x;
+ }
+ sCrcTable[i] = part;
+ }
+ }
+
+ public static final long crc64Long(byte[] buffer) {
+ long crc = INITIALCRC;
+ for (int k = 0, n = buffer.length; k < n; ++k) {
+ crc = sCrcTable[(((int) crc) ^ buffer[k]) & 0xff] ^ (crc >> 8);
+ }
+ return crc;
+ }
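+    // The table-driven loop above is deterministic: equal byte sequences always
+    // hash to the same 64-bit value, so the result is suitable as a stable key
+    // (for example, crc64Long("some/path") yields the same value across runs).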
+
+ public static byte[] getBytes(String in) {
+ byte[] result = new byte[in.length() * 2];
+ int output = 0;
+ for (char ch : in.toCharArray()) {
+ result[output++] = (byte) (ch & 0xFF);
+ result[output++] = (byte) (ch >> 8);
+ }
+ return result;
+ }
+
+ public static void closeSilently(Closeable c) {
+ if (c == null) return;
+ try {
+ c.close();
+ } catch (IOException t) {
+ Log.w(TAG, "close fail ", t);
+ }
+ }
+
+ public static int compare(long a, long b) {
+ return a < b ? -1 : a == b ? 0 : 1;
+ }
+
+ public static int ceilLog2(float value) {
+ int i;
+ for (i = 0; i < 31; i++) {
+ if ((1 << i) >= value) break;
+ }
+ return i;
+ }
+
+ public static int floorLog2(float value) {
+ int i;
+ for (i = 0; i < 31; i++) {
+ if ((1 << i) > value) break;
+ }
+ return i - 1;
+ }
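+    // For example, ceilLog2(5) == 3 (2^3 = 8 >= 5) and floorLog2(5) == 2
+    // (2^2 = 4 <= 5 < 2^3).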
+
+ public static void closeSilently(ParcelFileDescriptor fd) {
+ try {
+ if (fd != null) fd.close();
+ } catch (Throwable t) {
+ Log.w(TAG, "fail to close", t);
+ }
+ }
+
+ public static void closeSilently(Cursor cursor) {
+ try {
+ if (cursor != null) cursor.close();
+ } catch (Throwable t) {
+ Log.w(TAG, "fail to close", t);
+ }
+ }
+
+ public static float interpolateAngle(
+ float source, float target, float progress) {
+ // interpolate the angle from source to target
+        // We wrap the difference into the range (-180, 180], which is the
+        // shortest path from source to target.
+ float diff = target - source;
+ if (diff < 0) diff += 360f;
+ if (diff > 180) diff -= 360f;
+
+ float result = source + diff * progress;
+ return result < 0 ? result + 360f : result;
+ }
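+    // For example, interpolateAngle(10, 350, 0.5f) returns 0: the shortest
+    // path from 10 to 350 degrees goes backwards through 0 rather than
+    // forwards through 180.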
+
+ public static float interpolateScale(
+ float source, float target, float progress) {
+ return source + progress * (target - source);
+ }
+
+ public static String ensureNotNull(String value) {
+ return value == null ? "" : value;
+ }
+
+ public static float parseFloatSafely(String content, float defaultValue) {
+ if (content == null) return defaultValue;
+ try {
+ return Float.parseFloat(content);
+ } catch (NumberFormatException e) {
+ return defaultValue;
+ }
+ }
+
+ public static int parseIntSafely(String content, int defaultValue) {
+ if (content == null) return defaultValue;
+ try {
+ return Integer.parseInt(content);
+ } catch (NumberFormatException e) {
+ return defaultValue;
+ }
+ }
+
+ public static boolean isNullOrEmpty(String exifMake) {
+ return TextUtils.isEmpty(exifMake);
+ }
+
+ public static void waitWithoutInterrupt(Object object) {
+ try {
+ object.wait();
+ } catch (InterruptedException e) {
+ Log.w(TAG, "unexpected interrupt: " + object);
+ }
+ }
+
+ public static boolean handleInterrruptedException(Throwable e) {
+        // A helper to deal with InterruptedException / InterruptedIOException.
+        // If an interrupt is detected, we set the thread's interrupted bit again.
+ if (e instanceof InterruptedIOException
+ || e instanceof InterruptedException) {
+ Thread.currentThread().interrupt();
+ return true;
+ }
+ return false;
+ }
+
+ /**
+ * @return String with special XML characters escaped.
+ */
+ public static String escapeXml(String s) {
+ StringBuilder sb = new StringBuilder();
+ for (int i = 0, len = s.length(); i < len; ++i) {
+ char c = s.charAt(i);
+ switch (c) {
+ case '<': sb.append("&lt;"); break;
+ case '>': sb.append("&gt;"); break;
+ case '\"': sb.append("&quot;"); break;
+ case '\'': sb.append("&#039;"); break;
+ case '&': sb.append("&amp;"); break;
+ default: sb.append(c);
+ }
+ }
+ return sb.toString();
+ }
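+    // For example, escapeXml("a<b & 'c'") returns
+    // "a&lt;b &amp; &#039;c&#039;".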
+
+ public static String getUserAgent(Context context) {
+ PackageInfo packageInfo;
+ try {
+ packageInfo = context.getPackageManager().getPackageInfo(context.getPackageName(), 0);
+ } catch (NameNotFoundException e) {
+ throw new IllegalStateException("getPackageInfo failed");
+ }
+ return String.format("%s/%s; %s/%s/%s/%s; %s/%s/%s",
+ packageInfo.packageName,
+ packageInfo.versionName,
+ Build.BRAND,
+ Build.DEVICE,
+ Build.MODEL,
+ Build.ID,
+ Build.VERSION.SDK_INT,
+ Build.VERSION.RELEASE,
+ Build.VERSION.INCREMENTAL);
+ }
+
+ public static String[] copyOf(String[] source, int newSize) {
+ String[] result = new String[newSize];
+ newSize = Math.min(source.length, newSize);
+ System.arraycopy(source, 0, result, 0, newSize);
+ return result;
+ }
+
+    // Masks information for debugging. It returns <code>info.toString()</code> directly
+    // on debug builds (i.e., 'eng' and 'userdebug') and returns a mask ("****")
+    // on release builds to protect the information (e.g. for privacy reasons).
+ public static String maskDebugInfo(Object info) {
+ if (info == null) return null;
+ String s = info.toString();
+ int length = Math.min(s.length(), MASK_STRING.length());
+ return IS_DEBUG_BUILD ? s : MASK_STRING.substring(0, length);
+ }
+
+ // This method should be ONLY used for debugging.
+ public static void debug(String message, Object ... args) {
+ Log.v(DEBUG_TAG, String.format(message, args));
+ }
+} \ No newline at end of file
diff --git a/src/com/android/camera/support/filtershow/crop/CropExtras.java b/src/com/android/camera/support/filtershow/crop/CropExtras.java
new file mode 100644
index 000000000..6250a3573
--- /dev/null
+++ b/src/com/android/camera/support/filtershow/crop/CropExtras.java
@@ -0,0 +1,106 @@
+package com.android.camera.support.filtershow.crop;
+
+
+import android.net.Uri;
+
+public class CropExtras {
+
+ public static final String KEY_CROPPED_RECT = "cropped-rect";
+ public static final String KEY_OUTPUT_X = "outputX";
+ public static final String KEY_OUTPUT_Y = "outputY";
+ public static final String KEY_SCALE = "scale";
+ public static final String KEY_SCALE_UP_IF_NEEDED = "scaleUpIfNeeded";
+ public static final String KEY_ASPECT_X = "aspectX";
+ public static final String KEY_ASPECT_Y = "aspectY";
+ public static final String KEY_SET_AS_WALLPAPER = "set-as-wallpaper";
+ public static final String KEY_RETURN_DATA = "return-data";
+ public static final String KEY_DATA = "data";
+ public static final String KEY_SPOTLIGHT_X = "spotlightX";
+ public static final String KEY_SPOTLIGHT_Y = "spotlightY";
+ public static final String KEY_SHOW_WHEN_LOCKED = "showWhenLocked";
+ public static final String KEY_OUTPUT_FORMAT = "outputFormat";
+
+ private int mOutputX = 0;
+ private int mOutputY = 0;
+ private boolean mScaleUp = true;
+ private int mAspectX = 0;
+ private int mAspectY = 0;
+ private boolean mSetAsWallpaper = false;
+ private boolean mReturnData = false;
+ private Uri mExtraOutput = null;
+ private String mOutputFormat = null;
+ private boolean mShowWhenLocked = false;
+ private float mSpotlightX = 0;
+ private float mSpotlightY = 0;
+
+ public CropExtras(int outputX, int outputY, boolean scaleUp, int aspectX, int aspectY,
+ boolean setAsWallpaper, boolean returnData, Uri extraOutput, String outputFormat,
+ boolean showWhenLocked, float spotlightX, float spotlightY) {
+ mOutputX = outputX;
+ mOutputY = outputY;
+ mScaleUp = scaleUp;
+ mAspectX = aspectX;
+ mAspectY = aspectY;
+ mSetAsWallpaper = setAsWallpaper;
+ mReturnData = returnData;
+ mExtraOutput = extraOutput;
+ mOutputFormat = outputFormat;
+ mShowWhenLocked = showWhenLocked;
+ mSpotlightX = spotlightX;
+ mSpotlightY = spotlightY;
+ }
+
+ public CropExtras(CropExtras c) {
+ this(c.mOutputX, c.mOutputY, c.mScaleUp, c.mAspectX, c.mAspectY, c.mSetAsWallpaper,
+ c.mReturnData, c.mExtraOutput, c.mOutputFormat, c.mShowWhenLocked,
+ c.mSpotlightX, c.mSpotlightY);
+ }
+
+ public int getOutputX() {
+ return mOutputX;
+ }
+
+ public int getOutputY() {
+ return mOutputY;
+ }
+
+ public boolean getScaleUp() {
+ return mScaleUp;
+ }
+
+ public int getAspectX() {
+ return mAspectX;
+ }
+
+ public int getAspectY() {
+ return mAspectY;
+ }
+
+ public boolean getSetAsWallpaper() {
+ return mSetAsWallpaper;
+ }
+
+ public boolean getReturnData() {
+ return mReturnData;
+ }
+
+ public Uri getExtraOutput() {
+ return mExtraOutput;
+ }
+
+ public String getOutputFormat() {
+ return mOutputFormat;
+ }
+
+ public boolean getShowWhenLocked() {
+ return mShowWhenLocked;
+ }
+
+ public float getSpotlightX() {
+ return mSpotlightX;
+ }
+
+ public float getSpotlightY() {
+ return mSpotlightY;
+ }
+} \ No newline at end of file
diff --git a/src/com/android/camera/support/glrenderer/BasicTexture.java b/src/com/android/camera/support/glrenderer/BasicTexture.java
new file mode 100644
index 000000000..39cc519c2
--- /dev/null
+++ b/src/com/android/camera/support/glrenderer/BasicTexture.java
@@ -0,0 +1,197 @@
+package com.android.camera.support.glrenderer;
+
+
+import java.util.WeakHashMap;
+
+import com.android.camera.support.common.Utils;
+
+import android.util.Log;
+
+// BasicTexture is a Texture that corresponds to a real GL texture.
+// The state of a BasicTexture indicates whether its data is loaded to GL memory.
+// If a BasicTexture is loaded into GL memory, it has a GL texture id.
+public abstract class BasicTexture implements Texture {
+
+ @SuppressWarnings("unused")
+ private static final String TAG = "BasicTexture";
+ protected static final int UNSPECIFIED = -1;
+
+ protected static final int STATE_UNLOADED = 0;
+ protected static final int STATE_LOADED = 1;
+ protected static final int STATE_ERROR = -1;
+
+    // Log a warning if a texture is larger than this along either dimension.
+ private static final int MAX_TEXTURE_SIZE = 4096;
+
+ protected int mId = -1;
+ protected int mState;
+
+ protected int mWidth = UNSPECIFIED;
+ protected int mHeight = UNSPECIFIED;
+
+ protected int mTextureWidth;
+ protected int mTextureHeight;
+
+ private boolean mHasBorder;
+
+ protected GLCanvas mCanvasRef = null;
+ private static WeakHashMap<BasicTexture, Object> sAllTextures
+ = new WeakHashMap<BasicTexture, Object>();
+ private static ThreadLocal sInFinalizer = new ThreadLocal();
+
+ protected BasicTexture(GLCanvas canvas, int id, int state) {
+ setAssociatedCanvas(canvas);
+ mId = id;
+ mState = state;
+ synchronized (sAllTextures) {
+ sAllTextures.put(this, null);
+ }
+ }
+
+ protected BasicTexture() {
+ this(null, 0, STATE_UNLOADED);
+ }
+
+ protected void setAssociatedCanvas(GLCanvas canvas) {
+ mCanvasRef = canvas;
+ }
+
+ /**
+ * Sets the content size of this texture. In OpenGL, the actual texture
+     * size must be a power of 2; the size of the content may be smaller.
+ */
+ public void setSize(int width, int height) {
+ mWidth = width;
+ mHeight = height;
+ mTextureWidth = width > 0 ? Utils.nextPowerOf2(width) : 0;
+ mTextureHeight = height > 0 ? Utils.nextPowerOf2(height) : 0;
+ if (mTextureWidth > MAX_TEXTURE_SIZE || mTextureHeight > MAX_TEXTURE_SIZE) {
+ Log.w(TAG, String.format("texture is too large: %d x %d",
+ mTextureWidth, mTextureHeight), new Exception());
+ }
+ }
+
+ public boolean isFlippedVertically() {
+ return false;
+ }
+
+ public int getId() {
+ return mId;
+ }
+
+ @Override
+ public int getWidth() {
+ return mWidth;
+ }
+
+ @Override
+ public int getHeight() {
+ return mHeight;
+ }
+
+ // Returns the width rounded to the next power of 2.
+ public int getTextureWidth() {
+ return mTextureWidth;
+ }
+
+ // Returns the height rounded to the next power of 2.
+ public int getTextureHeight() {
+ return mTextureHeight;
+ }
+
+    // Returns true if the texture has a one-pixel transparent border around the
+    // actual content. This is used to avoid jagged edges.
+    //
+    // The jagged edges appear because we use GL_CLAMP_TO_EDGE for the texture
+    // wrap mode (GL_CLAMP is not available in OpenGL ES), so a pixel partially
+    // covered by the texture will use the color of the edge texel. If we add
+    // the transparent border, the color of the edge texel will be mixed with an
+    // appropriate amount of transparency.
+ //
+ // Currently our background is black, so we can draw the thumbnails without
+ // enabling blending.
+ public boolean hasBorder() {
+ return mHasBorder;
+ }
+
+ protected void setBorder(boolean hasBorder) {
+ mHasBorder = hasBorder;
+ }
+
+ @Override
+ public void draw(GLCanvas canvas, int x, int y) {
+ canvas.drawTexture(this, x, y, getWidth(), getHeight());
+ }
+
+ @Override
+ public void draw(GLCanvas canvas, int x, int y, int w, int h) {
+ canvas.drawTexture(this, x, y, w, h);
+ }
+
+ // onBind is called before GLCanvas binds this texture.
+ // It should make sure the data is uploaded to GL memory.
+ abstract protected boolean onBind(GLCanvas canvas);
+
+ // Returns the GL texture target for this texture (e.g. GL_TEXTURE_2D).
+ abstract protected int getTarget();
+
+ public boolean isLoaded() {
+ return mState == STATE_LOADED;
+ }
+
+ // recycle() is called when the texture will never be used again,
+ // so it can free all resources.
+ public void recycle() {
+ freeResource();
+ }
+
+ // yield() is called when the texture will not be used temporarily,
+ // so it can free some resources.
+ // The default implementation unloads the texture from GL memory, so
+ // the subclass should make sure it can reload the texture to GL memory
+ // later, or it will have to override this method.
+ public void yield() {
+ freeResource();
+ }
+
+ private void freeResource() {
+ GLCanvas canvas = mCanvasRef;
+ if (canvas != null && mId != -1) {
+ canvas.unloadTexture(this);
+ mId = -1; // Don't free it again.
+ }
+ mState = STATE_UNLOADED;
+ setAssociatedCanvas(null);
+ }
+
+ @Override
+ protected void finalize() {
+ sInFinalizer.set(BasicTexture.class);
+ recycle();
+ sInFinalizer.set(null);
+ }
+
+ // This is for deciding if we can call Bitmap's recycle().
+ // We cannot call Bitmap's recycle() in finalizer because at that point
+ // the finalizer of Bitmap may already be called so recycle() will crash.
+ public static boolean inFinalizer() {
+ return sInFinalizer.get() != null;
+ }
+
+ public static void yieldAllTextures() {
+ synchronized (sAllTextures) {
+ for (BasicTexture t : sAllTextures.keySet()) {
+ t.yield();
+ }
+ }
+ }
+
+ public static void invalidateAllTextures() {
+ synchronized (sAllTextures) {
+ for (BasicTexture t : sAllTextures.keySet()) {
+ t.mState = STATE_UNLOADED;
+ t.setAssociatedCanvas(null);
+ }
+ }
+ }
+}
diff --git a/src/com/android/camera/support/glrenderer/BitmapTexture.java b/src/com/android/camera/support/glrenderer/BitmapTexture.java
new file mode 100644
index 000000000..b62a44436
--- /dev/null
+++ b/src/com/android/camera/support/glrenderer/BitmapTexture.java
@@ -0,0 +1,38 @@
+package com.android.camera.support.glrenderer;
+
+import android.graphics.Bitmap;
+
+import junit.framework.Assert;
+
+// BitmapTexture is a texture whose content is specified by a fixed Bitmap.
+//
+// The texture does not own the Bitmap. The user should make sure the Bitmap
+// is valid during the texture's lifetime. When the texture is recycled, it
+// does not free the Bitmap.
+public class BitmapTexture extends UploadedTexture {
+ protected Bitmap mContentBitmap;
+
+ public BitmapTexture(Bitmap bitmap) {
+ this(bitmap, false);
+ }
+
+ public BitmapTexture(Bitmap bitmap, boolean hasBorder) {
+ super(hasBorder);
+ Assert.assertTrue(bitmap != null && !bitmap.isRecycled());
+ mContentBitmap = bitmap;
+ }
+
+ @Override
+ protected void onFreeBitmap(Bitmap bitmap) {
+ // Do nothing.
+ }
+
+ @Override
+ protected Bitmap onGetBitmap() {
+ return mContentBitmap;
+ }
+
+ public Bitmap getBitmap() {
+ return mContentBitmap;
+ }
+}
diff --git a/src/com/android/camera/support/glrenderer/ExtTexture.java b/src/com/android/camera/support/glrenderer/ExtTexture.java
new file mode 100644
index 000000000..121e17d5f
--- /dev/null
+++ b/src/com/android/camera/support/glrenderer/ExtTexture.java
@@ -0,0 +1,45 @@
+package com.android.camera.support.glrenderer;
+
+
+//ExtTexture is a texture whose content comes from an external texture.
+//Before drawing, setSize() should be called.
+public class ExtTexture extends BasicTexture {
+
+ private int mTarget;
+
+ public ExtTexture(GLCanvas canvas, int target) {
+ GLId glId = canvas.getGLId();
+ mId = glId.generateTexture();
+ mTarget = target;
+ }
+
+ private void uploadToCanvas(GLCanvas canvas) {
+ canvas.setTextureParameters(this);
+ setAssociatedCanvas(canvas);
+ mState = STATE_LOADED;
+ }
+
+ @Override
+ protected boolean onBind(GLCanvas canvas) {
+ if (!isLoaded()) {
+ uploadToCanvas(canvas);
+ }
+
+ return true;
+ }
+
+ @Override
+ public int getTarget() {
+ return mTarget;
+ }
+
+ @Override
+ public boolean isOpaque() {
+ return true;
+ }
+
+ @Override
+ public void yield() {
+ // we cannot free the texture because we have no backup.
+ }
+}
diff --git a/src/com/android/camera/support/glrenderer/GLCanvas.java b/src/com/android/camera/support/glrenderer/GLCanvas.java
new file mode 100644
index 000000000..1e4ec2f16
--- /dev/null
+++ b/src/com/android/camera/support/glrenderer/GLCanvas.java
@@ -0,0 +1,201 @@
+package com.android.camera.support.glrenderer;
+
+import android.graphics.Bitmap;
+import android.graphics.Rect;
+import android.graphics.RectF;
+
+import javax.microedition.khronos.opengles.GL11;
+
+//
+// GLCanvas gives a convenient interface to draw using OpenGL.
+//
+// When a rectangle is specified in this interface, it means the region
+// [x, x+width) * [y, y+height)
+//
+public interface GLCanvas {
+
+ public GLId getGLId();
+
+ // Tells GLCanvas the size of the underlying GL surface. This should be
+    // called before first drawing and whenever the size of the GL surface changes.
+ // This is called by GLRoot and should not be called by the clients
+ // who only want to draw on the GLCanvas. Both width and height must be
+ // nonnegative.
+ public abstract void setSize(int width, int height);
+
+ // Clear the drawing buffers. This should only be used by GLRoot.
+ public abstract void clearBuffer();
+
+ public abstract void clearBuffer(float[] argb);
+
+ // Sets and gets the current alpha, alpha must be in [0, 1].
+ public abstract void setAlpha(float alpha);
+
+ public abstract float getAlpha();
+
+ // (current alpha) = (current alpha) * alpha
+ public abstract void multiplyAlpha(float alpha);
+
+ // Change the current transform matrix.
+ public abstract void translate(float x, float y, float z);
+
+ public abstract void translate(float x, float y);
+
+ public abstract void scale(float sx, float sy, float sz);
+
+ public abstract void rotate(float angle, float x, float y, float z);
+
+ public abstract void multiplyMatrix(float[] mMatrix, int offset);
+
+    // Pushes the configuration state (matrix and alpha) onto
+ // a private stack.
+ public abstract void save();
+
+ // Same as save(), but only save those specified in saveFlags.
+ public abstract void save(int saveFlags);
+
+ public static final int SAVE_FLAG_ALL = 0xFFFFFFFF;
+ public static final int SAVE_FLAG_ALPHA = 0x01;
+ public static final int SAVE_FLAG_MATRIX = 0x02;
+
+    // Pops the configuration state (matrix and alpha) from the top of the
+    // stack and makes it current. This call balances a previous call to
+    // save(), and is used to remove all modifications to the configuration
+    // state since the last save call.
+ public abstract void restore();
+
+ // Draws a line using the specified paint from (x1, y1) to (x2, y2).
+ // (Both end points are included).
+ public abstract void drawLine(float x1, float y1, float x2, float y2, GLPaint paint);
+
+ // Draws a rectangle using the specified paint from (x1, y1) to (x2, y2).
+ // (Both end points are included).
+ public abstract void drawRect(float x1, float y1, float x2, float y2, GLPaint paint);
+
+ // Fills the specified rectangle with the specified color.
+ public abstract void fillRect(float x, float y, float width, float height, int color);
+
+ // Draws a texture to the specified rectangle.
+ public abstract void drawTexture(
+ BasicTexture texture, int x, int y, int width, int height);
+
+ public abstract void drawMesh(BasicTexture tex, int x, int y, int xyBuffer,
+ int uvBuffer, int indexBuffer, int indexCount);
+
+ // Draws the source rectangle part of the texture to the target rectangle.
+ public abstract void drawTexture(BasicTexture texture, RectF source, RectF target);
+
+ // Draw a texture with a specified texture transform.
+ public abstract void drawTexture(BasicTexture texture, float[] mTextureTransform,
+ int x, int y, int w, int h);
+
+    // Draws a texture mixed with a solid color to the specified rectangle.
+    // The effective color is from * (1 - ratio) + toColor * ratio.
+ public abstract void drawMixed(BasicTexture from, int toColor,
+ float ratio, int x, int y, int w, int h);
+
+ // Draw a region of a texture and a specified color to the specified
+ // rectangle. The actual color used is from * (1 - ratio) + to * ratio.
+ // The region of the texture is defined by parameter "src". The target
+ // rectangle is specified by parameter "target".
+ public abstract void drawMixed(BasicTexture from, int toColor,
+ float ratio, RectF src, RectF target);
+
+ // Unloads the specified texture from the canvas. The resource allocated
+ // to draw the texture will be released. The specified texture will return
+ // to the unloaded state. This function should be called only from
+ // BasicTexture or its descendant
+ public abstract boolean unloadTexture(BasicTexture texture);
+
+ // Delete the specified buffer object, similar to unloadTexture.
+ public abstract void deleteBuffer(int bufferId);
+
+ // Delete the textures and buffers in GL side. This function should only be
+ // called in the GL thread.
+ public abstract void deleteRecycledResources();
+
+ // Dump statistics information and clear the counters. For debug only.
+ public abstract void dumpStatisticsAndClear();
+
+ public abstract void beginRenderTarget(RawTexture texture);
+
+ public abstract void endRenderTarget();
+
+ /**
+ * Sets texture parameters to use GL_CLAMP_TO_EDGE for both
+ * GL_TEXTURE_WRAP_S and GL_TEXTURE_WRAP_T. Sets texture parameters to be
+ * GL_LINEAR for GL_TEXTURE_MIN_FILTER and GL_TEXTURE_MAG_FILTER.
+ * bindTexture() must be called prior to this.
+ *
+ * @param texture The texture to set parameters on.
+ */
+ public abstract void setTextureParameters(BasicTexture texture);
+
+ /**
+ * Initializes the texture to a size by calling texImage2D on it.
+ *
+ * @param texture The texture to initialize the size.
+ * @param format The texture format (e.g. GL_RGBA)
+ * @param type The texture type (e.g. GL_UNSIGNED_BYTE)
+ */
+ public abstract void initializeTextureSize(BasicTexture texture, int format, int type);
+
+ /**
+     * Initializes the texture content by calling texImage2D with the given bitmap.
+     *
+     * @param texture The texture to initialize.
+     * @param bitmap The bitmap to initialize the texture with.
+ */
+ public abstract void initializeTexture(BasicTexture texture, Bitmap bitmap);
+
+ /**
+ * Calls glTexSubImage2D to upload a bitmap to the texture.
+ *
+ * @param texture The target texture to write to.
+ * @param xOffset Specifies a texel offset in the x direction within the
+ * texture array.
+ * @param yOffset Specifies a texel offset in the y direction within the
+ * texture array.
+ * @param format The texture format (e.g. GL_RGBA)
+ * @param type The texture type (e.g. GL_UNSIGNED_BYTE)
+ */
+ public abstract void texSubImage2D(BasicTexture texture, int xOffset, int yOffset,
+ Bitmap bitmap,
+ int format, int type);
+
+ /**
+ * Generates buffers and uploads the buffer data.
+ *
+ * @param buffer The buffer to upload
+ * @return The buffer ID that was generated.
+ */
+ public abstract int uploadBuffer(java.nio.FloatBuffer buffer);
+
+ /**
+ * Generates buffers and uploads the element array buffer data.
+ *
+ * @param buffer The buffer to upload
+ * @return The buffer ID that was generated.
+ */
+ public abstract int uploadBuffer(java.nio.ByteBuffer buffer);
+
+ /**
+ * After LightCycle makes GL calls, this method is called to restore the GL
+ * configuration to the one expected by GLCanvas.
+ */
+ public abstract void recoverFromLightCycle();
+
+ /**
+ * Gets the bounds given by x, y, width, and height as well as the internal
+ * matrix state. There is no special handling for non-90-degree rotations.
+ * It only considers the lower-left and upper-right corners as the bounds.
+ *
+ * @param bounds The output bounds to write to.
+ * @param x The left side of the input rectangle.
+ * @param y The bottom of the input rectangle.
+ * @param width The width of the input rectangle.
+ * @param height The height of the input rectangle.
+ */
+ public abstract void getBounds(Rect bounds, int x, int y, int width, int height);
+} \ No newline at end of file
diff --git a/src/com/android/camera/support/glrenderer/GLId.java b/src/com/android/camera/support/glrenderer/GLId.java
new file mode 100644
index 000000000..695343b12
--- /dev/null
+++ b/src/com/android/camera/support/glrenderer/GLId.java
@@ -0,0 +1,17 @@
+package com.android.camera.support.glrenderer;
+
+import javax.microedition.khronos.opengles.GL11;
+import javax.microedition.khronos.opengles.GL11ExtensionPack;
+
+// This mimics corresponding GL functions.
+public interface GLId {
+ public int generateTexture();
+
+ public void glGenBuffers(int n, int[] buffers, int offset);
+
+ public void glDeleteTextures(GL11 gl, int n, int[] textures, int offset);
+
+ public void glDeleteBuffers(GL11 gl, int n, int[] buffers, int offset);
+
+ public void glDeleteFramebuffers(GL11ExtensionPack gl11ep, int n, int[] buffers, int offset);
+}
diff --git a/src/com/android/camera/support/glrenderer/GLPaint.java b/src/com/android/camera/support/glrenderer/GLPaint.java
new file mode 100644
index 000000000..7c6af73eb
--- /dev/null
+++ b/src/com/android/camera/support/glrenderer/GLPaint.java
@@ -0,0 +1,26 @@
+package com.android.camera.support.glrenderer;
+
+
+import junit.framework.Assert;
+
+public class GLPaint {
+ private float mLineWidth = 1f;
+ private int mColor = 0;
+
+ public void setColor(int color) {
+ mColor = color;
+ }
+
+ public int getColor() {
+ return mColor;
+ }
+
+ public void setLineWidth(float width) {
+ Assert.assertTrue(width >= 0);
+ mLineWidth = width;
+ }
+
+ public float getLineWidth() {
+ return mLineWidth;
+ }
+}
diff --git a/src/com/android/camera/support/glrenderer/NinePatchChunk.java b/src/com/android/camera/support/glrenderer/NinePatchChunk.java
new file mode 100644
index 000000000..f7deb1864
--- /dev/null
+++ b/src/com/android/camera/support/glrenderer/NinePatchChunk.java
@@ -0,0 +1,66 @@
+package com.android.camera.support.glrenderer;
+
+import android.graphics.Rect;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+// See "frameworks/base/include/utils/ResourceTypes.h" for the format of
+// NinePatch chunk.
+class NinePatchChunk {
+
+ public static final int NO_COLOR = 0x00000001;
+ public static final int TRANSPARENT_COLOR = 0x00000000;
+
+ public Rect mPaddings = new Rect();
+
+ public int mDivX[];
+ public int mDivY[];
+ public int mColor[];
+
+ private static void readIntArray(int[] data, ByteBuffer buffer) {
+ for (int i = 0, n = data.length; i < n; ++i) {
+ data[i] = buffer.getInt();
+ }
+ }
+
+ private static void checkDivCount(int length) {
+ if (length == 0 || (length & 0x01) != 0) {
+ throw new RuntimeException("invalid nine-patch: " + length);
+ }
+ }
+
+ public static NinePatchChunk deserialize(byte[] data) {
+ ByteBuffer byteBuffer =
+ ByteBuffer.wrap(data).order(ByteOrder.nativeOrder());
+
+ byte wasSerialized = byteBuffer.get();
+ if (wasSerialized == 0) return null;
+
+ NinePatchChunk chunk = new NinePatchChunk();
+ chunk.mDivX = new int[byteBuffer.get()];
+ chunk.mDivY = new int[byteBuffer.get()];
+ chunk.mColor = new int[byteBuffer.get()];
+
+ checkDivCount(chunk.mDivX.length);
+ checkDivCount(chunk.mDivY.length);
+
+ // skip 8 bytes
+ byteBuffer.getInt();
+ byteBuffer.getInt();
+
+ chunk.mPaddings.left = byteBuffer.getInt();
+ chunk.mPaddings.right = byteBuffer.getInt();
+ chunk.mPaddings.top = byteBuffer.getInt();
+ chunk.mPaddings.bottom = byteBuffer.getInt();
+
+ // skip 4 bytes
+ byteBuffer.getInt();
+
+ readIntArray(chunk.mDivX, byteBuffer);
+ readIntArray(chunk.mDivY, byteBuffer);
+ readIntArray(chunk.mColor, byteBuffer);
+
+ return chunk;
+ }
+} \ No newline at end of file
diff --git a/src/com/android/camera/support/glrenderer/NinePatchTexture.java b/src/com/android/camera/support/glrenderer/NinePatchTexture.java
new file mode 100644
index 000000000..c69afd682
--- /dev/null
+++ b/src/com/android/camera/support/glrenderer/NinePatchTexture.java
@@ -0,0 +1,408 @@
+package com.android.camera.support.glrenderer;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+import android.graphics.Rect;
+
+import com.android.camera.support.common.Utils;
+
+// NinePatchTexture is a texture backed by a NinePatch resource.
+//
+// getPaddings() returns paddings specified in the NinePatch.
+// getNinePatchChunk() returns the layout data specified in the NinePatch.
+//
+public class NinePatchTexture extends ResourceTexture {
+ @SuppressWarnings("unused")
+ private static final String TAG = "NinePatchTexture";
+ private NinePatchChunk mChunk;
+ private SmallCache<NinePatchInstance> mInstanceCache
+ = new SmallCache<NinePatchInstance>();
+
+ public NinePatchTexture(Context context, int resId) {
+ super(context, resId);
+ }
+
+ @Override
+ protected Bitmap onGetBitmap() {
+ if (mBitmap != null) return mBitmap;
+
+ BitmapFactory.Options options = new BitmapFactory.Options();
+ options.inPreferredConfig = Bitmap.Config.ARGB_8888;
+ Bitmap bitmap = BitmapFactory.decodeResource(
+ mContext.getResources(), mResId, options);
+ mBitmap = bitmap;
+ setSize(bitmap.getWidth(), bitmap.getHeight());
+ byte[] chunkData = bitmap.getNinePatchChunk();
+ mChunk = chunkData == null
+ ? null
+                : NinePatchChunk.deserialize(chunkData);
+ if (mChunk == null) {
+ throw new RuntimeException("invalid nine-patch image: " + mResId);
+ }
+ return bitmap;
+ }
+
+ public Rect getPaddings() {
+ // get the paddings from nine patch
+ if (mChunk == null) onGetBitmap();
+ return mChunk.mPaddings;
+ }
+
+ public NinePatchChunk getNinePatchChunk() {
+ if (mChunk == null) onGetBitmap();
+ return mChunk;
+ }
+
+    // This is a simple cache for a small number of things. Linear search
+    // is used because the cache is small. It also keeps often-used items near
+    // the front so that, when the cache is full, a less-used item is removed.
+ private static class SmallCache<V> {
+ private static final int CACHE_SIZE = 16;
+ private static final int CACHE_SIZE_START_MOVE = CACHE_SIZE / 2;
+ private int[] mKey = new int[CACHE_SIZE];
+ private V[] mValue = (V[]) new Object[CACHE_SIZE];
+ private int mCount; // number of items in this cache
+
+ // Puts a value into the cache. If the cache is full, also returns
+ // a less used item, otherwise returns null.
+ public V put(int key, V value) {
+ if (mCount == CACHE_SIZE) {
+ V old = mValue[CACHE_SIZE - 1]; // remove the last item
+ mKey[CACHE_SIZE - 1] = key;
+ mValue[CACHE_SIZE - 1] = value;
+ return old;
+ } else {
+ mKey[mCount] = key;
+ mValue[mCount] = value;
+ mCount++;
+ return null;
+ }
+ }
+
+ public V get(int key) {
+ for (int i = 0; i < mCount; i++) {
+ if (mKey[i] == key) {
+                    // Move the accessed item one position toward the front, so
+                    // it will be less likely to be removed when the cache is
+                    // full. Only do this if the cache is starting to get full.
+ if (mCount > CACHE_SIZE_START_MOVE && i > 0) {
+ int tmpKey = mKey[i];
+ mKey[i] = mKey[i - 1];
+ mKey[i - 1] = tmpKey;
+
+ V tmpValue = mValue[i];
+ mValue[i] = mValue[i - 1];
+ mValue[i - 1] = tmpValue;
+ }
+ return mValue[i];
+ }
+ }
+ return null;
+ }
+
+ public void clear() {
+ for (int i = 0; i < mCount; i++) {
+                mValue[i] = null; // make sure it can be garbage-collected.
+ }
+ mCount = 0;
+ }
+
+ public int size() {
+ return mCount;
+ }
+
+ public V valueAt(int i) {
+ return mValue[i];
+ }
+ }
+
+ private NinePatchInstance findInstance(GLCanvas canvas, int w, int h) {
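+        // Pack (w, h) into a single int key; this assumes both dimensions fit
+        // in 16 bits.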
+ int key = w;
+ key = (key << 16) | h;
+ NinePatchInstance instance = mInstanceCache.get(key);
+
+ if (instance == null) {
+ instance = new NinePatchInstance(this, w, h);
+ NinePatchInstance removed = mInstanceCache.put(key, instance);
+ if (removed != null) {
+ removed.recycle(canvas);
+ }
+ }
+
+ return instance;
+ }
+
+ @Override
+ public void draw(GLCanvas canvas, int x, int y, int w, int h) {
+ if (!isLoaded()) {
+ mInstanceCache.clear();
+ }
+
+ if (w != 0 && h != 0) {
+ findInstance(canvas, w, h).draw(canvas, this, x, y);
+ }
+ }
+
+ @Override
+ public void recycle() {
+ super.recycle();
+ GLCanvas canvas = mCanvasRef;
+ if (canvas == null) return;
+ int n = mInstanceCache.size();
+ for (int i = 0; i < n; i++) {
+ NinePatchInstance instance = mInstanceCache.valueAt(i);
+ instance.recycle(canvas);
+ }
+ mInstanceCache.clear();
+ }
+}
+
+// This keeps data for a specialization of NinePatchTexture with the size
+// (width, height). We pre-compute the coordinates for efficiency.
+class NinePatchInstance {
+
+ @SuppressWarnings("unused")
+ private static final String TAG = "NinePatchInstance";
+
+ // We need 16 vertices for a normal nine-patch image (the 4x4 vertices)
+ private static final int VERTEX_BUFFER_SIZE = 16 * 2;
+
+ // We need 22 indices for a normal nine-patch image, plus 2 for each
+    // transparent region. Currently there is at most one transparent region.
+ private static final int INDEX_BUFFER_SIZE = 22 + 2;
+
+ private FloatBuffer mXyBuffer;
+ private FloatBuffer mUvBuffer;
+ private ByteBuffer mIndexBuffer;
+
+    // GL buffer object names for: xy, uv, index.
+ private int mXyBufferName = -1;
+ private int mUvBufferName;
+ private int mIndexBufferName;
+
+ private int mIdxCount;
+
+ public NinePatchInstance(NinePatchTexture tex, int width, int height) {
+ NinePatchChunk chunk = tex.getNinePatchChunk();
+
+ if (width <= 0 || height <= 0) {
+ throw new RuntimeException("invalid dimension");
+ }
+
+        // The code could easily be extended to handle the general case by
+        // allocating more space for the buffers, but we only handle the one
+        // use case we need.
+ if (chunk.mDivX.length != 2 || chunk.mDivY.length != 2) {
+ throw new RuntimeException("unsupported nine patch");
+ }
+
+ float divX[] = new float[4];
+ float divY[] = new float[4];
+ float divU[] = new float[4];
+ float divV[] = new float[4];
+
+ int nx = stretch(divX, divU, chunk.mDivX, tex.getWidth(), width);
+ int ny = stretch(divY, divV, chunk.mDivY, tex.getHeight(), height);
+
+ prepareVertexData(divX, divY, divU, divV, nx, ny, chunk.mColor);
+ }
+
+ /**
+ * Stretches the texture according to the nine-patch rules. It will
+     * linearly distribute the stretchy parts defined in the nine-patch chunk to
+ * the target area.
+ *
+ * <pre>
+ * source
+ * /--------------^---------------\
+ * u0 u1 u2 u3 u4 u5
+ * div ---> |fffff|ssssssss|fff|ssssss|ffff| ---> u
+ * | div0 div1 div2 div3 |
+ * | | / / / /
+ * | | / / / /
+ * | | / / / /
+ * |fffff|ssss|fff|sss|ffff| ---> x
+ * x0 x1 x2 x3 x4 x5
+ * \----------v------------/
+ * target
+ *
+ * f: fixed segment
+ * s: stretchy segment
+ * </pre>
+ *
+     * @param div the stretchy parts defined in the nine-patch chunk
+     * @param source the length of the texture
+     * @param target the length on the drawing plane
+     * @param u output, the positions of these dividers in texture
+     *        coordinates
+     * @param x output, the corresponding positions of these dividers on the
+     *        drawing plane
+     * @return the number of these dividers.
+ */
+ private static int stretch(
+ float x[], float u[], int div[], int source, int target) {
+ int textureSize = Utils.nextPowerOf2(source);
+ float textureBound = (float) source / textureSize;
+
+ float stretch = 0;
+ for (int i = 0, n = div.length; i < n; i += 2) {
+ stretch += div[i + 1] - div[i];
+ }
+
+ float remaining = target - source + stretch;
+
+ float lastX = 0;
+ float lastU = 0;
+
+ x[0] = 0;
+ u[0] = 0;
+ for (int i = 0, n = div.length; i < n; i += 2) {
+ // Make the stretchy segment a little smaller to prevent sampling
+ // on neighboring fixed segments.
+ // fixed segment
+ x[i + 1] = lastX + (div[i] - lastU) + 0.5f;
+ u[i + 1] = Math.min((div[i] + 0.5f) / textureSize, textureBound);
+
+ // stretchy segment
+ float partU = div[i + 1] - div[i];
+ float partX = remaining * partU / stretch;
+ remaining -= partX;
+ stretch -= partU;
+
+ lastX = x[i + 1] + partX;
+ lastU = div[i + 1];
+ x[i + 2] = lastX - 0.5f;
+ u[i + 2] = Math.min((lastU - 0.5f)/ textureSize, textureBound);
+ }
+ // the last fixed segment
+ x[div.length + 1] = target;
+ u[div.length + 1] = textureBound;
+
+ // remove segments with length 0.
+ int last = 0;
+ for (int i = 1, n = div.length + 2; i < n; ++i) {
+ if ((x[i] - x[last]) < 1f) continue;
+ x[++last] = x[i];
+ u[last] = u[i];
+ }
+ return last + 1;
+ }
+
+ private void prepareVertexData(float x[], float y[], float u[], float v[],
+ int nx, int ny, int[] color) {
+ /*
+ * Given a 3x3 nine-patch image, the vertex order is defined as the
+ * following graph:
+ *
+ * (0) (1) (2) (3)
+ * | /| /| /|
+ * | / | / | / |
+ * (4) (5) (6) (7)
+ * | \ | \ | \ |
+ * | \| \| \|
+ * (8) (9) (A) (B)
+ * | /| /| /|
+ * | / | / | / |
+ * (C) (D) (E) (F)
+ *
+ * And we draw the triangle strip in the following index order:
+ *
+ * index: 04152637B6A5948C9DAEBF
+ */
+ int pntCount = 0;
+ float xy[] = new float[VERTEX_BUFFER_SIZE];
+ float uv[] = new float[VERTEX_BUFFER_SIZE];
+ for (int j = 0; j < ny; ++j) {
+ for (int i = 0; i < nx; ++i) {
+ int xIndex = (pntCount++) << 1;
+ int yIndex = xIndex + 1;
+ xy[xIndex] = x[i];
+ xy[yIndex] = y[j];
+ uv[xIndex] = u[i];
+ uv[yIndex] = v[j];
+ }
+ }
+
+ int idxCount = 1;
+ boolean isForward = false;
+ byte index[] = new byte[INDEX_BUFFER_SIZE];
+ for (int row = 0; row < ny - 1; row++) {
+ --idxCount;
+ isForward = !isForward;
+
+ int start, end, inc;
+ if (isForward) {
+ start = 0;
+ end = nx;
+ inc = 1;
+ } else {
+ start = nx - 1;
+ end = -1;
+ inc = -1;
+ }
+
+ for (int col = start; col != end; col += inc) {
+ int k = row * nx + col;
+ if (col != start) {
+ int colorIdx = row * (nx - 1) + col;
+ if (isForward) colorIdx--;
+ if (color[colorIdx] == NinePatchChunk.TRANSPARENT_COLOR) {
+ index[idxCount] = index[idxCount - 1];
+ ++idxCount;
+ index[idxCount++] = (byte) k;
+ }
+ }
+
+ index[idxCount++] = (byte) k;
+ index[idxCount++] = (byte) (k + nx);
+ }
+ }
+
+ mIdxCount = idxCount;
+
+ int size = (pntCount * 2) * (Float.SIZE / Byte.SIZE);
+ mXyBuffer = allocateDirectNativeOrderBuffer(size).asFloatBuffer();
+ mUvBuffer = allocateDirectNativeOrderBuffer(size).asFloatBuffer();
+ mIndexBuffer = allocateDirectNativeOrderBuffer(mIdxCount);
+
+ mXyBuffer.put(xy, 0, pntCount * 2).position(0);
+ mUvBuffer.put(uv, 0, pntCount * 2).position(0);
+ mIndexBuffer.put(index, 0, idxCount).position(0);
+ }
+
+ private static ByteBuffer allocateDirectNativeOrderBuffer(int size) {
+ return ByteBuffer.allocateDirect(size).order(ByteOrder.nativeOrder());
+ }
+
+ private void prepareBuffers(GLCanvas canvas) {
+ mXyBufferName = canvas.uploadBuffer(mXyBuffer);
+ mUvBufferName = canvas.uploadBuffer(mUvBuffer);
+ mIndexBufferName = canvas.uploadBuffer(mIndexBuffer);
+
+ // These buffers are never used again.
+ mXyBuffer = null;
+ mUvBuffer = null;
+ mIndexBuffer = null;
+ }
+
+ public void draw(GLCanvas canvas, NinePatchTexture tex, int x, int y) {
+ if (mXyBufferName == -1) {
+ prepareBuffers(canvas);
+ }
+ canvas.drawMesh(tex, x, y, mXyBufferName, mUvBufferName, mIndexBufferName, mIdxCount);
+ }
+
+ public void recycle(GLCanvas canvas) {
+ if (mXyBuffer == null) {
+ canvas.deleteBuffer(mXyBufferName);
+ canvas.deleteBuffer(mUvBufferName);
+ canvas.deleteBuffer(mIndexBufferName);
+ mXyBufferName = -1;
+ }
+ }
+} \ No newline at end of file
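As a worked illustration of the stretch() rules documented above (an editor's sketch, not part of the patch): take a 30-pixel source with one stretchy region divX = {10, 20}, drawn into a 60-pixel target.

    nextPowerOf2(30) = 32, so textureBound = 30 / 32 = 0.9375
    stretch = 20 - 10 = 10, remaining = 60 - 30 + 10 = 40

    x = { 0, 10.5, 50.0, 60 }        // divider positions on the drawing plane
    u = { 0, 0.328, 0.609, 0.9375 }  // corresponding texture coordinates

The two 10-pixel fixed ends keep roughly their original size while the 10-pixel stretchy middle expands to about 40 pixels; no segment collapses below one pixel, so stretch() returns 4 dividers and nx = 4 in this case.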
diff --git a/src/com/android/camera/support/glrenderer/RawTexture.java b/src/com/android/camera/support/glrenderer/RawTexture.java
new file mode 100644
index 000000000..b86282d0e
--- /dev/null
+++ b/src/com/android/camera/support/glrenderer/RawTexture.java
@@ -0,0 +1,58 @@
+package com.android.camera.support.glrenderer;
+
+
+import android.util.Log;
+
+import javax.microedition.khronos.opengles.GL11;
+
+public class RawTexture extends BasicTexture {
+ private static final String TAG = "RawTexture";
+
+ private final boolean mOpaque;
+ private boolean mIsFlipped;
+
+ public RawTexture(int width, int height, boolean opaque) {
+ mOpaque = opaque;
+ setSize(width, height);
+ }
+
+ @Override
+ public boolean isOpaque() {
+ return mOpaque;
+ }
+
+ @Override
+ public boolean isFlippedVertically() {
+ return mIsFlipped;
+ }
+
+ public void setIsFlippedVertically(boolean isFlipped) {
+ mIsFlipped = isFlipped;
+ }
+
+ protected void prepare(GLCanvas canvas) {
+ GLId glId = canvas.getGLId();
+ mId = glId.generateTexture();
+ canvas.initializeTextureSize(this, GL11.GL_RGBA, GL11.GL_UNSIGNED_BYTE);
+ canvas.setTextureParameters(this);
+ mState = STATE_LOADED;
+ setAssociatedCanvas(canvas);
+ }
+
+ @Override
+ protected boolean onBind(GLCanvas canvas) {
+ if (isLoaded()) return true;
+ Log.w(TAG, "lost the content due to context change");
+ return false;
+ }
+
+ @Override
+ public void yield() {
+ // we cannot free the texture because we have no backup.
+ }
+
+ @Override
+ protected int getTarget() {
+ return GL11.GL_TEXTURE_2D;
+ }
+}
diff --git a/src/com/android/camera/support/glrenderer/ResourceTexture.java b/src/com/android/camera/support/glrenderer/ResourceTexture.java
new file mode 100644
index 000000000..121c184cd
--- /dev/null
+++ b/src/com/android/camera/support/glrenderer/ResourceTexture.java
@@ -0,0 +1,37 @@
+package com.android.camera.support.glrenderer;
+
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.graphics.BitmapFactory;
+
+import junit.framework.Assert;
+
+// ResourceTexture is a texture whose Bitmap is decoded from a resource.
+// By default ResourceTexture is not opaque.
+public class ResourceTexture extends UploadedTexture {
+
+ protected final Context mContext;
+ protected final int mResId;
+
+ public ResourceTexture(Context context, int resId) {
+ Assert.assertNotNull(context);
+ mContext = context;
+ mResId = resId;
+ setOpaque(false);
+ }
+
+ @Override
+ protected Bitmap onGetBitmap() {
+ BitmapFactory.Options options = new BitmapFactory.Options();
+ options.inPreferredConfig = Bitmap.Config.ARGB_8888;
+ return BitmapFactory.decodeResource(
+ mContext.getResources(), mResId, options);
+ }
+
+ @Override
+ protected void onFreeBitmap(Bitmap bitmap) {
+ if (!inFinalizer()) {
+ bitmap.recycle();
+ }
+ }
+}
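A minimal usage sketch for ResourceTexture (an editor's illustration, not part of the patch; R.drawable.ic_example, the GLCanvas instance and the GL-thread context are assumed): the bitmap is decoded lazily on first draw, and recycle() should be called once the texture is no longer needed.

    // On the GL thread, inside a render pass with a GLCanvas available.
    ResourceTexture icon = new ResourceTexture(context, R.drawable.ic_example);
    icon.draw(canvas, x, y);   // decodes the resource and uploads it on first use
    // ... later, when the texture will not be drawn again:
    icon.recycle();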
diff --git a/src/com/android/camera/support/glrenderer/Texture.java b/src/com/android/camera/support/glrenderer/Texture.java
new file mode 100644
index 000000000..4d1a49d2b
--- /dev/null
+++ b/src/com/android/camera/support/glrenderer/Texture.java
@@ -0,0 +1,27 @@
+package com.android.camera.support.glrenderer;
+
+// Texture is a rectangular image which can be drawn on GLCanvas.
+// The isOpaque() function gives a hint about whether the texture is opaque,
+// so the drawing can be done faster.
+//
+// This is the current texture hierarchy:
+//
+//Texture
+//-- ColorTexture
+//-- FadeInTexture
+//-- BasicTexture
+// -- UploadedTexture
+// -- BitmapTexture
+// -- Tile
+// -- ResourceTexture
+// -- NinePatchTexture
+// -- CanvasTexture
+// -- StringTexture
+//
+public interface Texture {
+ public int getWidth();
+ public int getHeight();
+ public void draw(GLCanvas canvas, int x, int y);
+ public void draw(GLCanvas canvas, int x, int y, int w, int h);
+ public boolean isOpaque();
+}
diff --git a/src/com/android/camera/support/glrenderer/UploadedTexture.java b/src/com/android/camera/support/glrenderer/UploadedTexture.java
new file mode 100644
index 000000000..106aa4ef3
--- /dev/null
+++ b/src/com/android/camera/support/glrenderer/UploadedTexture.java
@@ -0,0 +1,283 @@
+package com.android.camera.support.glrenderer;
+
+
+import android.graphics.Bitmap;
+import android.graphics.Bitmap.Config;
+import android.opengl.GLUtils;
+
+import junit.framework.Assert;
+
+import java.util.HashMap;
+
+import javax.microedition.khronos.opengles.GL11;
+
+// UploadedTextures use a Bitmap for the content of the texture.
+//
+// Subclasses should implement onGetBitmap() to provide the Bitmap and
+// implement onFreeBitmap(mBitmap) which will be called when the Bitmap
+// is not needed anymore.
+//
+// isContentValid() is meaningful only when isLoaded() returns true.
+// It indicates whether the content uploaded to the GPU is still up to date.
+//
+// The user of this class should call recycle() when the texture is not
+// needed anymore.
+//
+// By default an UploadedTexture is opaque (so it can be drawn faster without
+// blending). The user or subclass can override it using setOpaque().
+public abstract class UploadedTexture extends BasicTexture {
+
+ // To avoid repeatedly allocating border lines, we cache the ones already used here.
+ // Since their lengths are powers of two, the cache does not use much memory.
+ private static HashMap<BorderKey, Bitmap> sBorderLines =
+ new HashMap<BorderKey, Bitmap>();
+ private static BorderKey sBorderKey = new BorderKey();
+
+ @SuppressWarnings("unused")
+ private static final String TAG = "Texture";
+ private boolean mContentValid = true;
+
+ // Indicates whether this texture is being uploaded in the background.
+ private boolean mIsUploading = false;
+ private boolean mOpaque = true;
+ private boolean mThrottled = false;
+ private static int sUploadedCount;
+ private static final int UPLOAD_LIMIT = 100;
+
+ protected Bitmap mBitmap;
+ private int mBorder;
+
+ protected UploadedTexture() {
+ this(false);
+ }
+
+ protected UploadedTexture(boolean hasBorder) {
+ super(null, 0, STATE_UNLOADED);
+ if (hasBorder) {
+ setBorder(true);
+ mBorder = 1;
+ }
+ }
+
+ protected void setIsUploading(boolean uploading) {
+ mIsUploading = uploading;
+ }
+
+ public boolean isUploading() {
+ return mIsUploading;
+ }
+
+ private static class BorderKey implements Cloneable {
+ public boolean vertical;
+ public Config config;
+ public int length;
+
+ @Override
+ public int hashCode() {
+ int x = config.hashCode() ^ length;
+ return vertical ? x : -x;
+ }
+
+ @Override
+ public boolean equals(Object object) {
+ if (!(object instanceof BorderKey)) return false;
+ BorderKey o = (BorderKey) object;
+ return vertical == o.vertical
+ && config == o.config && length == o.length;
+ }
+
+ @Override
+ public BorderKey clone() {
+ try {
+ return (BorderKey) super.clone();
+ } catch (CloneNotSupportedException e) {
+ throw new AssertionError(e);
+ }
+ }
+ }
+
+ protected void setThrottled(boolean throttled) {
+ mThrottled = throttled;
+ }
+
+ private static Bitmap getBorderLine(
+ boolean vertical, Config config, int length) {
+ BorderKey key = sBorderKey;
+ key.vertical = vertical;
+ key.config = config;
+ key.length = length;
+ Bitmap bitmap = sBorderLines.get(key);
+ if (bitmap == null) {
+ bitmap = vertical
+ ? Bitmap.createBitmap(1, length, config)
+ : Bitmap.createBitmap(length, 1, config);
+ sBorderLines.put(key.clone(), bitmap);
+ }
+ return bitmap;
+ }
+
+ private Bitmap getBitmap() {
+ if (mBitmap == null) {
+ mBitmap = onGetBitmap();
+ int w = mBitmap.getWidth() + mBorder * 2;
+ int h = mBitmap.getHeight() + mBorder * 2;
+ if (mWidth == UNSPECIFIED) {
+ setSize(w, h);
+ }
+ }
+ return mBitmap;
+ }
+
+ private void freeBitmap() {
+ Assert.assertTrue(mBitmap != null);
+ onFreeBitmap(mBitmap);
+ mBitmap = null;
+ }
+
+ @Override
+ public int getWidth() {
+ if (mWidth == UNSPECIFIED) getBitmap();
+ return mWidth;
+ }
+
+ @Override
+ public int getHeight() {
+ if (mWidth == UNSPECIFIED) getBitmap();
+ return mHeight;
+ }
+
+ protected abstract Bitmap onGetBitmap();
+
+ protected abstract void onFreeBitmap(Bitmap bitmap);
+
+ protected void invalidateContent() {
+ if (mBitmap != null) freeBitmap();
+ mContentValid = false;
+ mWidth = UNSPECIFIED;
+ mHeight = UNSPECIFIED;
+ }
+
+ /**
+ * Whether the content on GPU is valid.
+ */
+ public boolean isContentValid() {
+ return isLoaded() && mContentValid;
+ }
+
+ /**
+ * Updates the texture content in GPU memory.
+ * @param canvas the canvas used to upload the content
+ */
+ public void updateContent(GLCanvas canvas) {
+ if (!isLoaded()) {
+ if (mThrottled && ++sUploadedCount > UPLOAD_LIMIT) {
+ return;
+ }
+ uploadToCanvas(canvas);
+ } else if (!mContentValid) {
+ Bitmap bitmap = getBitmap();
+ int format = GLUtils.getInternalFormat(bitmap);
+ int type = GLUtils.getType(bitmap);
+ canvas.texSubImage2D(this, mBorder, mBorder, bitmap, format, type);
+ freeBitmap();
+ mContentValid = true;
+ }
+ }
+
+ public static void resetUploadLimit() {
+ sUploadedCount = 0;
+ }
+
+ public static boolean uploadLimitReached() {
+ return sUploadedCount > UPLOAD_LIMIT;
+ }
+
+ private void uploadToCanvas(GLCanvas canvas) {
+
+ Bitmap bitmap = getBitmap();
+ if (bitmap != null) {
+ try {
+ int bWidth = bitmap.getWidth();
+ int bHeight = bitmap.getHeight();
+ int width = bWidth + mBorder * 2;
+ int height = bHeight + mBorder * 2;
+ int texWidth = getTextureWidth();
+ int texHeight = getTextureHeight();
+
+ Assert.assertTrue(bWidth <= texWidth && bHeight <= texHeight);
+
+ // Upload the bitmap to a new texture.
+ mId = canvas.getGLId().generateTexture();
+ canvas.setTextureParameters(this);
+
+ if (bWidth == texWidth && bHeight == texHeight) {
+ canvas.initializeTexture(this, bitmap);
+ } else {
+ int format = GLUtils.getInternalFormat(bitmap);
+ int type = GLUtils.getType(bitmap);
+ Config config = bitmap.getConfig();
+
+ canvas.initializeTextureSize(this, format, type);
+ canvas.texSubImage2D(this, mBorder, mBorder, bitmap, format, type);
+
+ if (mBorder > 0) {
+ // Left border
+ Bitmap line = getBorderLine(true, config, texHeight);
+ canvas.texSubImage2D(this, 0, 0, line, format, type);
+
+ // Top border
+ line = getBorderLine(false, config, texWidth);
+ canvas.texSubImage2D(this, 0, 0, line, format, type);
+ }
+
+ // Right border
+ if (mBorder + bWidth < texWidth) {
+ Bitmap line = getBorderLine(true, config, texHeight);
+ canvas.texSubImage2D(this, mBorder + bWidth, 0, line, format, type);
+ }
+
+ // Bottom border
+ if (mBorder + bHeight < texHeight) {
+ Bitmap line = getBorderLine(false, config, texWidth);
+ canvas.texSubImage2D(this, 0, mBorder + bHeight, line, format, type);
+ }
+ }
+ } finally {
+ freeBitmap();
+ }
+ // Update texture state.
+ setAssociatedCanvas(canvas);
+ mState = STATE_LOADED;
+ mContentValid = true;
+ } else {
+ mState = STATE_ERROR;
+ throw new RuntimeException("Texture load fail, no bitmap");
+ }
+ }
+
+ @Override
+ protected boolean onBind(GLCanvas canvas) {
+ updateContent(canvas);
+ return isContentValid();
+ }
+
+ @Override
+ protected int getTarget() {
+ return GL11.GL_TEXTURE_2D;
+ }
+
+ public void setOpaque(boolean isOpaque) {
+ mOpaque = isOpaque;
+ }
+
+ @Override
+ public boolean isOpaque() {
+ return mOpaque;
+ }
+
+ @Override
+ public void recycle() {
+ super.recycle();
+ if (mBitmap != null) freeBitmap();
+ }
+} \ No newline at end of file
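To make the onGetBitmap()/onFreeBitmap() contract described in the class comment concrete, here is a minimal hypothetical subclass (an editor's sketch, not part of the patch) that renders a solid color swatch; BitmapTexture and ResourceTexture in this package follow the same pattern.

    class ColorSwatchTexture extends UploadedTexture {
        private final int mColor;
        private final int mSize;

        ColorSwatchTexture(int color, int size) {
            mColor = color;
            mSize = size;
        }

        @Override
        protected Bitmap onGetBitmap() {
            // Called lazily; updateContent()/onBind() uploads the result to the GPU.
            Bitmap bitmap = Bitmap.createBitmap(mSize, mSize, Bitmap.Config.ARGB_8888);
            bitmap.eraseColor(mColor);
            return bitmap;
        }

        @Override
        protected void onFreeBitmap(Bitmap bitmap) {
            // Called once the content has been uploaded, or on recycle().
            bitmap.recycle();
        }
    }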
diff --git a/src/com/android/camera/support/ui/BitmapScreenNail.java b/src/com/android/camera/support/ui/BitmapScreenNail.java
new file mode 100644
index 000000000..a0252b9a5
--- /dev/null
+++ b/src/com/android/camera/support/ui/BitmapScreenNail.java
@@ -0,0 +1,45 @@
+package com.android.camera.support.ui;
+
+import android.graphics.Bitmap;
+import android.graphics.RectF;
+
+import com.android.camera.support.glrenderer.BitmapTexture;
+import com.android.camera.support.glrenderer.GLCanvas;
+
+public class BitmapScreenNail implements ScreenNail {
+ private final BitmapTexture mBitmapTexture;
+
+ public BitmapScreenNail(Bitmap bitmap) {
+ mBitmapTexture = new BitmapTexture(bitmap);
+ }
+
+ @Override
+ public int getWidth() {
+ return mBitmapTexture.getWidth();
+ }
+
+ @Override
+ public int getHeight() {
+ return mBitmapTexture.getHeight();
+ }
+
+ @Override
+ public void draw(GLCanvas canvas, int x, int y, int width, int height) {
+ mBitmapTexture.draw(canvas, x, y, width, height);
+ }
+
+ @Override
+ public void noDraw() {
+ // do nothing
+ }
+
+ @Override
+ public void recycle() {
+ mBitmapTexture.recycle();
+ }
+
+ @Override
+ public void draw(GLCanvas canvas, RectF source, RectF dest) {
+ canvas.drawTexture(mBitmapTexture, source, dest);
+ }
+}
diff --git a/src/com/android/camera/support/ui/OrientationSource.java b/src/com/android/camera/support/ui/OrientationSource.java
new file mode 100644
index 000000000..a2841eec4
--- /dev/null
+++ b/src/com/android/camera/support/ui/OrientationSource.java
@@ -0,0 +1,6 @@
+package com.android.camera.support.ui;
+
+public interface OrientationSource {
+ public int getDisplayRotation();
+ public int getCompensation();
+}
diff --git a/src/com/android/camera/support/ui/ScreenNail.java b/src/com/android/camera/support/ui/ScreenNail.java
new file mode 100644
index 000000000..67b2f58c5
--- /dev/null
+++ b/src/com/android/camera/support/ui/ScreenNail.java
@@ -0,0 +1,21 @@
+package com.android.camera.support.ui;
+
+
+import android.graphics.RectF;
+
+import com.android.camera.support.glrenderer.GLCanvas;
+
+public interface ScreenNail {
+ public int getWidth();
+ public int getHeight();
+ public void draw(GLCanvas canvas, int x, int y, int width, int height);
+
+ // Notifies this ScreenNail that it does not need to be drawn in this frame.
+ public void noDraw();
+
+ // This ScreenNail will not be used anymore. Release related resources.
+ public void recycle();
+
+ // This is only used by TileImageView to back up the tiles not yet loaded.
+ public void draw(GLCanvas canvas, RectF source, RectF dest);
+}
diff --git a/src/com/android/camera/support/ui/SurfaceTextureScreenNail.java b/src/com/android/camera/support/ui/SurfaceTextureScreenNail.java
new file mode 100644
index 000000000..d78b1ba4a
--- /dev/null
+++ b/src/com/android/camera/support/ui/SurfaceTextureScreenNail.java
@@ -0,0 +1,128 @@
+package com.android.camera.support.ui;
+
+
+import android.annotation.TargetApi;
+import android.graphics.RectF;
+import android.graphics.SurfaceTexture;
+
+import com.android.camera.support.common.ApiHelper;
+import com.android.camera.support.glrenderer.ExtTexture;
+import com.android.camera.support.glrenderer.GLCanvas;
+
+
+@TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB)
+public abstract class SurfaceTextureScreenNail implements ScreenNail,
+ SurfaceTexture.OnFrameAvailableListener {
+ @SuppressWarnings("unused")
+ private static final String TAG = "SurfaceTextureScreenNail";
+ // This constant was not added to the public API until level 15, but that was
+ // just an oversight; the value itself is valid on earlier releases.
+ private static final int GL_TEXTURE_EXTERNAL_OES = 0x8D65;
+
+ protected ExtTexture mExtTexture;
+ private SurfaceTexture mSurfaceTexture;
+ private int mWidth, mHeight;
+ private float[] mTransform = new float[16];
+ private boolean mHasTexture = false;
+
+ public SurfaceTextureScreenNail() {
+ }
+
+ public void acquireSurfaceTexture(GLCanvas canvas) {
+ mExtTexture = new ExtTexture(canvas, GL_TEXTURE_EXTERNAL_OES);
+ mExtTexture.setSize(mWidth, mHeight);
+ mSurfaceTexture = new SurfaceTexture(mExtTexture.getId());
+ setDefaultBufferSize(mSurfaceTexture, mWidth, mHeight);
+ mSurfaceTexture.setOnFrameAvailableListener(this);
+ synchronized (this) {
+ mHasTexture = true;
+ }
+ }
+
+ @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH_MR1)
+ private static void setDefaultBufferSize(SurfaceTexture st, int width, int height) {
+ if (ApiHelper.HAS_SET_DEFALT_BUFFER_SIZE) {
+ st.setDefaultBufferSize(width, height);
+ }
+ }
+
+ @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH)
+ private static void releaseSurfaceTexture(SurfaceTexture st) {
+ st.setOnFrameAvailableListener(null);
+ if (ApiHelper.HAS_RELEASE_SURFACE_TEXTURE) {
+ st.release();
+ }
+ }
+
+ public SurfaceTexture getSurfaceTexture() {
+ return mSurfaceTexture;
+ }
+
+ public void releaseSurfaceTexture() {
+ synchronized (this) {
+ mHasTexture = false;
+ }
+ mExtTexture.recycle();
+ mExtTexture = null;
+ releaseSurfaceTexture(mSurfaceTexture);
+ mSurfaceTexture = null;
+ }
+
+ public void setSize(int width, int height) {
+ mWidth = width;
+ mHeight = height;
+ }
+
+ public void resizeTexture() {
+ if (mExtTexture != null) {
+ mExtTexture.setSize(mWidth, mHeight);
+ setDefaultBufferSize(mSurfaceTexture, mWidth, mHeight);
+ }
+ }
+
+ @Override
+ public int getWidth() {
+ return mWidth;
+ }
+
+ @Override
+ public int getHeight() {
+ return mHeight;
+ }
+
+ @Override
+ public void draw(GLCanvas canvas, int x, int y, int width, int height) {
+ synchronized (this) {
+ if (!mHasTexture) return;
+ mSurfaceTexture.updateTexImage();
+ mSurfaceTexture.getTransformMatrix(mTransform);
+
+ // Flip vertically.
+ canvas.save(GLCanvas.SAVE_FLAG_MATRIX);
+ int cx = x + width / 2;
+ int cy = y + height / 2;
+ canvas.translate(cx, cy);
+ canvas.scale(1, -1, 1);
+ canvas.translate(-cx, -cy);
+ updateTransformMatrix(mTransform);
+ canvas.drawTexture(mExtTexture, mTransform, x, y, width, height);
+ canvas.restore();
+ }
+ }
+
+ @Override
+ public void draw(GLCanvas canvas, RectF source, RectF dest) {
+ throw new UnsupportedOperationException();
+ }
+
+ protected void updateTransformMatrix(float[] matrix) {}
+
+ @Override
+ abstract public void noDraw();
+
+ @Override
+ abstract public void recycle();
+
+ @Override
+ abstract public void onFrameAvailable(SurfaceTexture surfaceTexture);
+} \ No newline at end of file
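A hedged sketch of how a concrete subclass might be wired to a camera preview (an editor's illustration, not part of the patch; the Runnable render hook and the android.hardware.Camera usage are assumptions about the surrounding app):

    class PreviewScreenNail extends SurfaceTextureScreenNail {
        private final Runnable mRequestRender;   // e.g. glSurfaceView::requestRender

        PreviewScreenNail(Runnable requestRender) {
            mRequestRender = requestRender;
        }

        @Override
        public void onFrameAvailable(SurfaceTexture st) {
            mRequestRender.run();   // schedule a redraw for the new preview frame
        }

        @Override
        public void noDraw() {
            // nothing to do in this sketch
        }

        @Override
        public void recycle() {
            // a real implementation would call releaseSurfaceTexture() on the GL thread
        }
    }

    // On the GL thread, after setSize(previewWidth, previewHeight):
    //   nail.acquireSurfaceTexture(canvas);
    //   camera.setPreviewTexture(nail.getSurfaceTexture());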
diff --git a/src/com/android/camera/support/util/AccessibilityUtils.java b/src/com/android/camera/support/util/AccessibilityUtils.java
new file mode 100644
index 000000000..e758e2f7e
--- /dev/null
+++ b/src/com/android/camera/support/util/AccessibilityUtils.java
@@ -0,0 +1,38 @@
+package com.android.camera.support.util;
+
+import android.content.Context;
+import android.support.v4.view.accessibility.AccessibilityRecordCompat;
+import android.view.View;
+import android.view.accessibility.AccessibilityEvent;
+import android.view.accessibility.AccessibilityManager;
+
+import com.android.camera.support.common.ApiHelper;
+
+/**
+ * AccessibilityUtils provides functions needed in accessibility mode. All the
+ * functions in this class are compatible with Gingerbread and later APIs.
+ */
+public class AccessibilityUtils {
+ public static void makeAnnouncement(View view, CharSequence announcement) {
+ if (view == null)
+ return;
+ if (ApiHelper.HAS_ANNOUNCE_FOR_ACCESSIBILITY) {
+ view.announceForAccessibility(announcement);
+ } else {
+ // For API 15 and earlier, we need to construct an accessibility event
+ Context ctx = view.getContext();
+ AccessibilityManager am = (AccessibilityManager) ctx.getSystemService(
+ Context.ACCESSIBILITY_SERVICE);
+ if (!am.isEnabled()) return;
+ AccessibilityEvent event = AccessibilityEvent.obtain(
+ AccessibilityEvent.TYPE_NOTIFICATION_STATE_CHANGED);
+ AccessibilityRecordCompat arc = new AccessibilityRecordCompat(event);
+ arc.setSource(view);
+ event.setClassName(view.getClass().getName());
+ event.setPackageName(view.getContext().getPackageName());
+ event.setEnabled(view.isEnabled());
+ event.getText().add(announcement);
+ am.sendAccessibilityEvent(event);
+ }
+ }
+} \ No newline at end of file
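A one-line usage sketch (an editor's illustration; the view and the string resource are hypothetical): makeAnnouncement() picks the right announcement mechanism for the platform version.

    // e.g. after the user switches camera modes:
    AccessibilityUtils.makeAnnouncement(modeSwitcher,
            context.getString(R.string.accessibility_mode_switched));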
diff --git a/src/com/android/camera/support/util/MotionEventHelper.java b/src/com/android/camera/support/util/MotionEventHelper.java
new file mode 100644
index 000000000..2a3340e28
--- /dev/null
+++ b/src/com/android/camera/support/util/MotionEventHelper.java
@@ -0,0 +1,105 @@
+package com.android.camera.support.util;
+
+import android.annotation.TargetApi;
+import android.graphics.Matrix;
+import android.util.FloatMath;
+import android.view.MotionEvent;
+import android.view.MotionEvent.PointerCoords;
+
+import com.android.gallery3d.common.ApiHelper;
+
+public final class MotionEventHelper {
+ private MotionEventHelper() {}
+
+ public static MotionEvent transformEvent(MotionEvent e, Matrix m) {
+ // We try to use the new transform method if possible because it uses
+ // less memory.
+ if (ApiHelper.HAS_MOTION_EVENT_TRANSFORM) {
+ return transformEventNew(e, m);
+ } else {
+ return transformEventOld(e, m);
+ }
+ }
+
+ @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB)
+ private static MotionEvent transformEventNew(MotionEvent e, Matrix m) {
+ MotionEvent newEvent = MotionEvent.obtain(e);
+ newEvent.transform(m);
+ return newEvent;
+ }
+
+ // This is copied from Input.cpp in the android framework.
+ private static MotionEvent transformEventOld(MotionEvent e, Matrix m) {
+ long downTime = e.getDownTime();
+ long eventTime = e.getEventTime();
+ int action = e.getAction();
+ int pointerCount = e.getPointerCount();
+ int[] pointerIds = getPointerIds(e);
+ PointerCoords[] pointerCoords = getPointerCoords(e);
+ int metaState = e.getMetaState();
+ float xPrecision = e.getXPrecision();
+ float yPrecision = e.getYPrecision();
+ int deviceId = e.getDeviceId();
+ int edgeFlags = e.getEdgeFlags();
+ int source = e.getSource();
+ int flags = e.getFlags();
+
+ // Copy the x and y coordinates into an array, map them, and copy back.
+ float[] xy = new float[pointerCoords.length * 2];
+ for (int i = 0; i < pointerCount; i++) {
+ xy[2 * i] = pointerCoords[i].x;
+ xy[2 * i + 1] = pointerCoords[i].y;
+ }
+ m.mapPoints(xy);
+ for (int i = 0; i < pointerCount; i++) {
+ pointerCoords[i].x = xy[2 * i];
+ pointerCoords[i].y = xy[2 * i + 1];
+ pointerCoords[i].orientation = transformAngle(
+ m, pointerCoords[i].orientation);
+ }
+
+ MotionEvent n = MotionEvent.obtain(downTime, eventTime, action,
+ pointerCount, pointerIds, pointerCoords, metaState, xPrecision,
+ yPrecision, deviceId, edgeFlags, source, flags);
+
+ return n;
+ }
+
+ private static int[] getPointerIds(MotionEvent e) {
+ int n = e.getPointerCount();
+ int[] r = new int[n];
+ for (int i = 0; i < n; i++) {
+ r[i] = e.getPointerId(i);
+ }
+ return r;
+ }
+
+ private static PointerCoords[] getPointerCoords(MotionEvent e) {
+ int n = e.getPointerCount();
+ PointerCoords[] r = new PointerCoords[n];
+ for (int i = 0; i < n; i++) {
+ r[i] = new PointerCoords();
+ e.getPointerCoords(i, r[i]);
+ }
+ return r;
+ }
+
+ private static float transformAngle(Matrix m, float angleRadians) {
+ // Construct and transform a vector oriented at the specified clockwise
+ // angle from vertical. Coordinate system: down is increasing Y, right is
+ // increasing X.
+ float[] v = new float[2];
+ v[0] = FloatMath.sin(angleRadians);
+ v[1] = -FloatMath.cos(angleRadians);
+ m.mapVectors(v);
+
+ // Derive the transformed vector's clockwise angle from vertical.
+ float result = (float) Math.atan2(v[0], -v[1]);
+ if (result < -Math.PI / 2) {
+ result += Math.PI;
+ } else if (result > Math.PI / 2) {
+ result -= Math.PI;
+ }
+ return result;
+ }
+}
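A short usage sketch for transformEvent() (an editor's illustration, not part of the patch; displayMatrix and gestureDetector are assumed): it is typically used to map a touch event into a transformed view's coordinate space before further dispatch.

    // Map a touch event through the inverse of the view's display matrix
    // before handing it to gesture detection.
    Matrix inverse = new Matrix();
    displayMatrix.invert(inverse);
    MotionEvent transformed = MotionEventHelper.transformEvent(event, inverse);
    try {
        gestureDetector.onTouchEvent(transformed);
    } finally {
        transformed.recycle();
    }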