-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/AlignFeatures.cpp | 231
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/AlignFeatures.h | 93
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/Blend.cpp | 1410
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/Blend.h | 128
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/CSite.h | 63
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/Delaunay.cpp | 633
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/Delaunay.h | 126
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/EdgePointerUtil.h | 37
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/Geometry.h | 156
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/ImageUtils.cpp | 408
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/ImageUtils.h | 173
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/Interp.h | 80
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/Log.h | 24
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/MatrixUtils.h | 141
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/Mosaic.cpp | 265
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/Mosaic.h | 226
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/MosaicTypes.h | 154
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/Pyramid.cpp | 270
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/Pyramid.h | 54
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/trsMatrix.cpp | 94
-rw-r--r--  jni_mosaic/feature_mos/src/mosaic/trsMatrix.h | 53
-rwxr-xr-x  jni_mosaic/feature_mos/src/mosaic_renderer/FrameBuffer.cpp | 98
-rwxr-xr-x  jni_mosaic/feature_mos/src/mosaic_renderer/FrameBuffer.h | 34
-rwxr-xr-x  jni_mosaic/feature_mos/src/mosaic_renderer/Renderer.cpp | 226
-rwxr-xr-x  jni_mosaic/feature_mos/src/mosaic_renderer/Renderer.h | 65
-rwxr-xr-x  jni_mosaic/feature_mos/src/mosaic_renderer/SurfaceTextureRenderer.cpp | 186
-rwxr-xr-x  jni_mosaic/feature_mos/src/mosaic_renderer/SurfaceTextureRenderer.h | 44
-rwxr-xr-x  jni_mosaic/feature_mos/src/mosaic_renderer/WarpRenderer.cpp | 190
-rwxr-xr-x  jni_mosaic/feature_mos/src/mosaic_renderer/WarpRenderer.h | 48
-rwxr-xr-x  jni_mosaic/feature_mos/src/mosaic_renderer/YVURenderer.cpp | 160
-rwxr-xr-x  jni_mosaic/feature_mos/src/mosaic_renderer/YVURenderer.h | 35
-rw-r--r--  jni_mosaic/feature_stab/src/dbreg/dbreg.cpp | 794
-rw-r--r--  jni_mosaic/feature_stab/src/dbreg/dbreg.h | 581
-rw-r--r--  jni_mosaic/feature_stab/src/dbreg/dbstabsmooth.cpp | 330
-rw-r--r--  jni_mosaic/feature_stab/src/dbreg/dbstabsmooth.h | 157
-rw-r--r--  jni_mosaic/feature_stab/src/dbreg/targetver.h | 40
-rw-r--r--  jni_mosaic/feature_stab/src/dbreg/vp_motionmodel.c | 377
-rw-r--r--  jni_mosaic/feature_stab/src/dbreg/vp_motionmodel.h | 282
-rw-r--r--  jni_mosaic/feature_stab/src/dbregtest/PgmImage.cpp | 260
-rw-r--r--  jni_mosaic/feature_stab/src/dbregtest/PgmImage.h | 95
-rw-r--r--  jni_mosaic/feature_stab/src/dbregtest/dbregtest.cpp | 399
-rw-r--r--  jni_mosaic/feature_stab/src/dbregtest/stdafx.cpp | 24
-rw-r--r--  jni_mosaic/feature_stab/src/dbregtest/stdafx.h | 28
-rw-r--r--  jni_mosaic/feature_stab/src/dbregtest/targetver.h | 29
-rw-r--r--  src/com/android/camera/CameraActivity.java | 659
-rw-r--r--  src/com/android/camera/CameraModule.java | 10
-rw-r--r--  src/com/android/camera/Mosaic.java | 206
-rw-r--r--  src/com/android/camera/MosaicFrameProcessor.java | 237
-rw-r--r--  src/com/android/camera/MosaicPreviewRenderer.java | 265
-rw-r--r--  src/com/android/camera/MosaicRenderer.java | 89
-rw-r--r--  src/com/android/camera/NewCameraActivity.java | 433
-rw-r--r--  src/com/android/camera/NewCameraModule.java | 74
-rw-r--r--  src/com/android/camera/NewPhotoMenu.java | 200
-rw-r--r--  src/com/android/camera/NewPhotoModule.java | 2032
-rw-r--r--  src/com/android/camera/NewPhotoUI.java | 787
-rw-r--r--  src/com/android/camera/NewPreviewGestures.java | 263
-rw-r--r--  src/com/android/camera/NewVideoMenu.java | 205
-rw-r--r--  src/com/android/camera/NewVideoModule.java | 2253
-rw-r--r--  src/com/android/camera/NewVideoUI.java | 706
-rw-r--r--  src/com/android/camera/PanoProgressBar.java | 188
-rw-r--r--  src/com/android/camera/PanoUtil.java | 86
-rw-r--r--  src/com/android/camera/PanoramaModule.java | 1304
-rw-r--r--  src/com/android/camera/PhotoController.java | 1
-rw-r--r--  src/com/android/camera/PhotoMenu.java | 2
-rw-r--r--  src/com/android/camera/PhotoModule.java | 225
-rw-r--r--  src/com/android/camera/PhotoUI.java | 405
-rw-r--r--  src/com/android/camera/PreviewGestures.java | 293
-rw-r--r--  src/com/android/camera/SurfaceTextureRenderer.java | 224
-rw-r--r--  src/com/android/camera/VideoController.java | 2
-rw-r--r--  src/com/android/camera/VideoMenu.java | 3
-rw-r--r--  src/com/android/camera/VideoModule.java | 304
-rw-r--r--  src/com/android/camera/VideoUI.java | 387
-rw-r--r--  src/com/android/camera/data/CameraDataAdapter.java | 59
-rw-r--r--  src/com/android/camera/data/LocalData.java | 88
-rw-r--r--  src/com/android/camera/ui/CameraControls.java | 85
-rw-r--r--  src/com/android/camera/ui/CameraRootView.java | 67
-rw-r--r--  src/com/android/camera/ui/FaceView.java | 18
-rw-r--r--  src/com/android/camera/ui/FilmStripView.java | 29
-rw-r--r--  src/com/android/camera/ui/NewCameraRootView.java | 145
-rw-r--r--  src/com/android/camera/ui/RenderOverlay.java | 11
-rw-r--r--  src/com/android/camera/ui/RotatableLayout.java | 52
-rw-r--r--  src/com/android/gallery3d/app/StateManager.java | 12
-rw-r--r--  src/com/android/gallery3d/data/LocalImage.java | 8
-rw-r--r--  src/com/android/gallery3d/filtershow/FilterShowActivity.java | 50
-rw-r--r--  src/com/android/gallery3d/filtershow/cache/ImageLoader.java | 4
-rw-r--r--  src/com/android/gallery3d/filtershow/filters/BaseFiltersManager.java | 14
-rw-r--r--  src/com/android/gallery3d/filtershow/filters/FilterFxRepresentation.java | 3
-rw-r--r--  src/com/android/gallery3d/filtershow/imageshow/MasterImage.java | 4
-rw-r--r--  src/com/android/gallery3d/filtershow/presets/ImagePreset.java | 8
-rw-r--r--  src/com/android/gallery3d/filtershow/tools/SaveCopyTask.java | 291
-rw-r--r--  src/com/android/gallery3d/ingest/ImportTask.java | 1
-rw-r--r--  src/com/android/gallery3d/ingest/IngestActivity.java | 21
-rw-r--r--  src/com/android/gallery3d/ingest/MtpDeviceIndex.java | 37
-rw-r--r--  src/com/android/gallery3d/ingest/ui/MtpImageView.java | 55
-rw-r--r--  src/com/android/gallery3d/util/SaveVideoFileUtils.java | 8
-rw-r--r--  src/com/android/photos/data/BitmapDecoder.java | 69
-rw-r--r--  src/com/android/photos/drawables/MtpThumbnailDrawable.java | 61
-rw-r--r--  src_pd/com/android/gallery3d/filtershow/filters/FiltersManager.java | 80
-rw-r--r--  src_pd/com/android/gallery3d/util/UsageStatistics.java | 5
99 files changed, 2179 insertions, 20250 deletions
diff --git a/jni_mosaic/feature_mos/src/mosaic/AlignFeatures.cpp b/jni_mosaic/feature_mos/src/mosaic/AlignFeatures.cpp
deleted file mode 100644
index aeabf8f97..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/AlignFeatures.cpp
+++ /dev/null
@@ -1,231 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////
-// AlignFeatures.cpp
-// S.O. # :
-// Author(s): zkira, mbansal, bsouthall, narodits
-// $Id: AlignFeatures.cpp,v 1.20 2011/06/17 13:35:47 mbansal Exp $
-
-#include <stdio.h>
-#include <string.h>
-
-#include "trsMatrix.h"
-#include "MatrixUtils.h"
-#include "AlignFeatures.h"
-#include "Log.h"
-
-#define LOG_TAG "AlignFeatures"
-
-Align::Align()
-{
- width = height = 0;
- frame_number = 0;
- num_frames_captured = 0;
- reference_frame_index = 0;
- db_Identity3x3(Hcurr);
- db_Identity3x3(Hprev);
-}
-
-Align::~Align()
-{
- // Free gray-scale image
- if (imageGray != ImageUtils::IMAGE_TYPE_NOIMAGE)
- ImageUtils::freeImage(imageGray);
-}
-
-char* Align::getRegProfileString()
-{
- return reg.profile_string;
-}
-
-int Align::initialize(int width, int height, bool _quarter_res, float _thresh_still)
-{
- int nr_corners = DEFAULT_NR_CORNERS;
- double max_disparity = DEFAULT_MAX_DISPARITY;
- int motion_model_type = DEFAULT_MOTION_MODEL;
- int nrsamples = DB_DEFAULT_NR_SAMPLES;
- double scale = DB_POINT_STANDARDDEV;
- int chunk_size = DB_DEFAULT_CHUNK_SIZE;
- int nrhorz = width/48; // Empirically determined number of horizontal
- int nrvert = height/60; // and vertical buckets for harris corner detection.
- bool linear_polish = false;
- unsigned int reference_update_period = DEFAULT_REFERENCE_UPDATE_PERIOD;
-
- const bool DEFAULT_USE_SMALLER_MATCHING_WINDOW = false;
- bool use_smaller_matching_window = DEFAULT_USE_SMALLER_MATCHING_WINDOW;
-
- quarter_res = _quarter_res;
- thresh_still = _thresh_still;
-
- frame_number = 0;
- num_frames_captured = 0;
- reference_frame_index = 0;
- db_Identity3x3(Hcurr);
- db_Identity3x3(Hprev);
-
- if (!reg.Initialized())
- {
- reg.Init(width, height, motion_model_type, 20, linear_polish, quarter_res,
- scale, reference_update_period, false, 0, nrsamples, chunk_size,
- nr_corners, max_disparity, use_smaller_matching_window,
- nrhorz, nrvert);
- }
- this->width = width;
- this->height = height;
-
- imageGray = ImageUtils::allocateImage(width, height, 1);
-
- if (reg.Initialized())
- return ALIGN_RET_OK;
- else
- return ALIGN_RET_ERROR;
-}
-
-int Align::addFrameRGB(ImageType imageRGB)
-{
- ImageUtils::rgb2gray(imageGray, imageRGB, width, height);
- return addFrame(imageGray);
-}
-
-int Align::addFrame(ImageType imageGray_)
-{
- int ret_code = ALIGN_RET_OK;
-
- // Obtain a vector of pointers to rows in image and pass in to dbreg
- ImageType *m_rows = ImageUtils::imageTypeToRowPointers(imageGray_, width, height);
-
- if (frame_number == 0)
- {
- reg.AddFrame(m_rows, Hcurr, true); // Force this to be a reference frame
- int num_corner_ref = reg.GetNrRefCorners();
-
- if (num_corner_ref < MIN_NR_REF_CORNERS)
- {
- return ALIGN_RET_LOW_TEXTURE;
- }
- }
- else
- {
- reg.AddFrame(m_rows, Hcurr, false);
- }
-
- // Average translation per frame =
- // [Translation from Frame0 to Frame(n-1)] / [(n-1)]
- average_tx_per_frame = (num_frames_captured < 2) ? 0.0 :
- Hprev[2] / (num_frames_captured - 1);
-
- // Increment the captured frame counter if we already have a reference frame
- num_frames_captured++;
-
- if (frame_number != 0)
- {
- int num_inliers = reg.GetNrInliers();
-
- if(num_inliers < MIN_NR_INLIERS)
- {
- ret_code = ALIGN_RET_FEW_INLIERS;
-
- Hcurr[0] = 1.0;
- Hcurr[1] = 0.0;
- // Set this as the average per frame translation taking into account
- // the separation of the current frame from the reference frame...
- Hcurr[2] = -average_tx_per_frame *
- (num_frames_captured - reference_frame_index);
- Hcurr[3] = 0.0;
- Hcurr[4] = 1.0;
- Hcurr[5] = 0.0;
- Hcurr[6] = 0.0;
- Hcurr[7] = 0.0;
- Hcurr[8] = 1.0;
- }
-
- if(fabs(Hcurr[2])<thresh_still && fabs(Hcurr[5])<thresh_still) // Still camera
- {
- return ALIGN_RET_ERROR;
- }
-
- // compute the homography:
- double Hinv33[3][3];
- double Hprev33[3][3];
- double Hcurr33[3][3];
-
- // Invert and multiply with the previous transformation
- Matrix33::convert9to33(Hcurr33, Hcurr);
- Matrix33::convert9to33(Hprev33, Hprev);
- normProjMat33d(Hcurr33);
-
- inv33d(Hcurr33, Hinv33);
-
- mult33d(Hcurr33, Hprev33, Hinv33);
- normProjMat33d(Hcurr33);
- Matrix9::convert33to9(Hprev, Hcurr33);
- // Since we have already factored the current transformation
- // into Hprev, we can reset the Hcurr to identity
- db_Identity3x3(Hcurr);
-
- // Update the reference frame to be the current frame
- reg.UpdateReference(m_rows,quarter_res,false);
-
- // Update the reference frame index
- reference_frame_index = num_frames_captured;
- }
-
- frame_number++;
-
- return ret_code;
-}
-
-// Get current transformation
-int Align::getLastTRS(double trs[3][3])
-{
- if (frame_number < 1)
- {
- trs[0][0] = 1.0;
- trs[0][1] = 0.0;
- trs[0][2] = 0.0;
- trs[1][0] = 0.0;
- trs[1][1] = 1.0;
- trs[1][2] = 0.0;
- trs[2][0] = 0.0;
- trs[2][1] = 0.0;
- trs[2][2] = 1.0;
- return ALIGN_RET_ERROR;
- }
-
- // Note that the logic here handles the case where a frame is not used for
- // mosaicing but is captured and used in the preview-rendering.
- // For these frames, we don't set Hcurr to identity in AddFrame() and the
- // logic here appends their transformation to Hprev to render them with the
- // correct transformation. For the frames we do use for mosaicing, we already
- // append their Hcurr to Hprev in AddFrame() and then set Hcurr to identity.
-
- double Hinv33[3][3];
- double Hprev33[3][3];
- double Hcurr33[3][3];
-
- Matrix33::convert9to33(Hcurr33, Hcurr);
- normProjMat33d(Hcurr33);
- inv33d(Hcurr33, Hinv33);
-
- Matrix33::convert9to33(Hprev33, Hprev);
-
- mult33d(trs, Hprev33, Hinv33);
- normProjMat33d(trs);
-
- return ALIGN_RET_OK;
-}
-
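For reference while reading the deleted Align::addFrame() and Align::getLastTRS() above: both fold the latest alignment homography Hcurr into the running frame-0 chain Hprev as Hprev * inv(Hcurr) and then renormalize, which is what allows Hcurr to be reset to identity after each accepted frame. A minimal, self-contained C++ sketch of that composition step follows; the 3x3 helpers below are illustrative stand-ins written from scratch, not the trsMatrix/MatrixUtils routines this change removes.

// Multiply two 3x3 matrices: out = a * b.
static void mult33(double out[3][3], double a[3][3], double b[3][3]) {
    for (int i = 0; i < 3; ++i)
        for (int j = 0; j < 3; ++j) {
            out[i][j] = 0.0;
            for (int k = 0; k < 3; ++k) out[i][j] += a[i][k] * b[k][j];
        }
}

// Adjugate-over-determinant inverse of a 3x3 matrix.
static void inv33(double m[3][3], double out[3][3]) {
    double det = m[0][0] * (m[1][1] * m[2][2] - m[1][2] * m[2][1])
               - m[0][1] * (m[1][0] * m[2][2] - m[1][2] * m[2][0])
               + m[0][2] * (m[1][0] * m[2][1] - m[1][1] * m[2][0]);
    out[0][0] =  (m[1][1] * m[2][2] - m[1][2] * m[2][1]) / det;
    out[0][1] = -(m[0][1] * m[2][2] - m[0][2] * m[2][1]) / det;
    out[0][2] =  (m[0][1] * m[1][2] - m[0][2] * m[1][1]) / det;
    out[1][0] = -(m[1][0] * m[2][2] - m[1][2] * m[2][0]) / det;
    out[1][1] =  (m[0][0] * m[2][2] - m[0][2] * m[2][0]) / det;
    out[1][2] = -(m[0][0] * m[1][2] - m[0][2] * m[1][0]) / det;
    out[2][0] =  (m[1][0] * m[2][1] - m[1][1] * m[2][0]) / det;
    out[2][1] = -(m[0][0] * m[2][1] - m[0][1] * m[2][0]) / det;
    out[2][2] =  (m[0][0] * m[1][1] - m[0][1] * m[1][0]) / det;
}

// Scale a projective transform so that m[2][2] == 1.
static void normProj33(double m[3][3]) {
    double s = m[2][2];
    for (int i = 0; i < 3; ++i)
        for (int j = 0; j < 3; ++j) m[i][j] /= s;
}

// Hprev <- norm(Hprev * inv(Hcurr)); afterwards Hcurr can be reset to identity.
static void chainReference(double Hprev[3][3], double Hcurr[3][3]) {
    double Hinv[3][3], tmp[3][3];
    inv33(Hcurr, Hinv);
    mult33(tmp, Hprev, Hinv);
    normProj33(tmp);
    for (int i = 0; i < 3; ++i)
        for (int j = 0; j < 3; ++j) Hprev[i][j] = tmp[i][j];
}
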
diff --git a/jni_mosaic/feature_mos/src/mosaic/AlignFeatures.h b/jni_mosaic/feature_mos/src/mosaic/AlignFeatures.h
deleted file mode 100644
index 19f39051d..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/AlignFeatures.h
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////
-// Align.h
-// S.O. # :
-// Author(s): zkira
-// $Id: AlignFeatures.h,v 1.13 2011/06/17 13:35:47 mbansal Exp $
-
-#ifndef ALIGN_H
-#define ALIGN_H
-
-#include "dbreg/dbreg.h"
-#include <db_utilities_camera.h>
-
-#include "ImageUtils.h"
-#include "MatrixUtils.h"
-
-class Align {
-
-public:
- // Types of alignment possible
- static const int ALIGN_TYPE_PAN = 1;
-
- // Return codes
- static const int ALIGN_RET_LOW_TEXTURE = -2;
- static const int ALIGN_RET_ERROR = -1;
- static const int ALIGN_RET_OK = 0;
- static const int ALIGN_RET_FEW_INLIERS = 1;
-
- ///// Settings for feature-based alignment
- // Number of features to use from corner detection
- static const int DEFAULT_NR_CORNERS=750;
- static const double DEFAULT_MAX_DISPARITY=0.1;//0.4;
- // Type of homography to model
- static const int DEFAULT_MOTION_MODEL=DB_HOMOGRAPHY_TYPE_R_T;
-// static const int DEFAULT_MOTION_MODEL=DB_HOMOGRAPHY_TYPE_PROJECTIVE;
-// static const int DEFAULT_MOTION_MODEL=DB_HOMOGRAPHY_TYPE_AFFINE;
- static const unsigned int DEFAULT_REFERENCE_UPDATE_PERIOD=1500; // Manual reference frame update so set this to a large number
-
- static const int MIN_NR_REF_CORNERS = 25;
- static const int MIN_NR_INLIERS = 10;
-
- Align();
- ~Align();
-
- // Initialization of structures, etc.
- int initialize(int width, int height, bool quarter_res, float thresh_still);
-
- // Add a frame. Note: The alignment computation is performed
- // in this function
- int addFrameRGB(ImageType image);
- int addFrame(ImageType image);
-
- // Obtain the TRS matrix from the last two frames
- int getLastTRS(double trs[3][3]);
- char* getRegProfileString();
-
-protected:
-
- db_FrameToReferenceRegistration reg;
-
- int frame_number;
-
- double Hcurr[9]; // Homography from the alignment reference to the frame-t
- double Hprev[9]; // Homography from frame-0 to the frame-(t-1)
-
- int reference_frame_index; // Index of the reference frame from all captured frames
- int num_frames_captured; // Total number of frames captured (different from frame_number)
- double average_tx_per_frame; // Average pixel translation per captured frame
-
- int width,height;
-
- bool quarter_res; // Whether to process at quarter resolution
- float thresh_still; // Translation threshold in pixels to detect still camera
- ImageType imageGray;
-};
-
-
-#endif
diff --git a/jni_mosaic/feature_mos/src/mosaic/Blend.cpp b/jni_mosaic/feature_mos/src/mosaic/Blend.cpp
deleted file mode 100644
index ef983ff67..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/Blend.cpp
+++ /dev/null
@@ -1,1410 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////
-// Blend.cpp
-// $Id: Blend.cpp,v 1.22 2011/06/24 04:22:14 mbansal Exp $
-
-#include <string.h>
-
-#include "Interp.h"
-#include "Blend.h"
-
-#include "Geometry.h"
-#include "trsMatrix.h"
-
-#include "Log.h"
-#define LOG_TAG "BLEND"
-
-Blend::Blend()
-{
- m_wb.blendingType = BLEND_TYPE_NONE;
-}
-
-Blend::~Blend()
-{
- if (m_pFrameVPyr) free(m_pFrameVPyr);
- if (m_pFrameUPyr) free(m_pFrameUPyr);
- if (m_pFrameYPyr) free(m_pFrameYPyr);
-}
-
-int Blend::initialize(int blendingType, int stripType, int frame_width, int frame_height)
-{
- this->width = frame_width;
- this->height = frame_height;
- this->m_wb.blendingType = blendingType;
- this->m_wb.stripType = stripType;
-
- m_wb.blendRange = m_wb.blendRangeUV = BLEND_RANGE_DEFAULT;
- m_wb.nlevs = m_wb.blendRange;
- m_wb.nlevsC = m_wb.blendRangeUV;
-
- if (m_wb.nlevs <= 0) m_wb.nlevs = 1; // Need levels for YUV processing
- if (m_wb.nlevsC > m_wb.nlevs) m_wb.nlevsC = m_wb.nlevs;
-
- m_wb.roundoffOverlap = 1.5;
-
- m_pFrameYPyr = NULL;
- m_pFrameUPyr = NULL;
- m_pFrameVPyr = NULL;
-
- m_pFrameYPyr = PyramidShort::allocatePyramidPacked(m_wb.nlevs, (unsigned short) width, (unsigned short) height, BORDER);
- m_pFrameUPyr = PyramidShort::allocatePyramidPacked(m_wb.nlevsC, (unsigned short) (width), (unsigned short) (height), BORDER);
- m_pFrameVPyr = PyramidShort::allocatePyramidPacked(m_wb.nlevsC, (unsigned short) (width), (unsigned short) (height), BORDER);
-
- if (!m_pFrameYPyr || !m_pFrameUPyr || !m_pFrameVPyr)
- {
- LOGE("Error: Could not allocate pyramids for blending");
- return BLEND_RET_ERROR_MEMORY;
- }
-
- return BLEND_RET_OK;
-}
-
-inline double max(double a, double b) { return a > b ? a : b; }
-inline double min(double a, double b) { return a < b ? a : b; }
-
-void Blend::AlignToMiddleFrame(MosaicFrame **frames, int frames_size)
-{
- // Unwarp this frame and Warp the others to match
- MosaicFrame *mb = NULL;
- MosaicFrame *ref = frames[int(frames_size/2)]; // Middle frame
-
- double invtrs[3][3];
- inv33d(ref->trs, invtrs);
-
- for(int mfit = 0; mfit < frames_size; mfit++)
- {
- mb = frames[mfit];
- double temp[3][3];
- mult33d(temp, invtrs, mb->trs);
- memcpy(mb->trs, temp, sizeof(temp));
- normProjMat33d(mb->trs);
- }
-}
-
-int Blend::runBlend(MosaicFrame **oframes, MosaicFrame **rframes,
- int frames_size,
- ImageType &imageMosaicYVU, int &mosaicWidth, int &mosaicHeight,
- float &progress, bool &cancelComputation)
-{
- int ret;
- int numCenters;
-
- MosaicFrame **frames;
-
- // For THIN strip mode, accept all frames for blending
- if (m_wb.stripType == STRIP_TYPE_THIN)
- {
- frames = oframes;
- }
- else // For WIDE strip mode, first select the relevant frames to blend.
- {
- SelectRelevantFrames(oframes, frames_size, rframes, frames_size);
- frames = rframes;
- }
-
- ComputeBlendParameters(frames, frames_size, true);
- numCenters = frames_size;
-
- if (numCenters == 0)
- {
- LOGE("Error: No frames to blend");
- return BLEND_RET_ERROR;
- }
-
- if (!(m_AllSites = m_Triangulator.allocMemory(numCenters)))
- {
- return BLEND_RET_ERROR_MEMORY;
- }
-
- // Bounding rectangle (real numbers) of the final mosaic computed by projecting
- // each input frame into the mosaic coordinate system.
- BlendRect global_rect;
-
- global_rect.lft = global_rect.bot = 2e30; // min values
- global_rect.rgt = global_rect.top = -2e30; // max values
- MosaicFrame *mb = NULL;
- double halfwidth = width / 2.0;
- double halfheight = height / 2.0;
-
- double z, x0, y0, x1, y1, x2, y2, x3, y3;
-
- // Corners of the left-most and right-most frames respectively in the
- // mosaic coordinate system.
- double xLeftCorners[2] = {2e30, 2e30};
- double xRightCorners[2] = {-2e30, -2e30};
-
- // Corners of the top-most and bottom-most frames respectively in the
- // mosaic coordinate system.
- double yTopCorners[2] = {2e30, 2e30};
- double yBottomCorners[2] = {-2e30, -2e30};
-
-
- // Determine the extents of the final mosaic
- CSite *csite = m_AllSites ;
- for(int mfit = 0; mfit < frames_size; mfit++)
- {
- mb = frames[mfit];
-
- // Compute clipping for this frame's rect
- FrameToMosaicRect(mb->width, mb->height, mb->trs, mb->brect);
- // Clip global rect using this frame's rect
- ClipRect(mb->brect, global_rect);
-
- // Calculate the corner points
- FrameToMosaic(mb->trs, 0.0, 0.0, x0, y0);
- FrameToMosaic(mb->trs, 0.0, mb->height-1.0, x1, y1);
- FrameToMosaic(mb->trs, mb->width-1.0, mb->height-1.0, x2, y2);
- FrameToMosaic(mb->trs, mb->width-1.0, 0.0, x3, y3);
-
- if(x0 < xLeftCorners[0] || x1 < xLeftCorners[1]) // If either of the left corners is lower
- {
- xLeftCorners[0] = x0;
- xLeftCorners[1] = x1;
- }
-
- if(x3 > xRightCorners[0] || x2 > xRightCorners[1]) // If either of the right corners is higher
- {
- xRightCorners[0] = x3;
- xRightCorners[1] = x2;
- }
-
- if(y0 < yTopCorners[0] || y3 < yTopCorners[1]) // If either of the top corners is lower
- {
- yTopCorners[0] = y0;
- yTopCorners[1] = y3;
- }
-
- if(y1 > yBottomCorners[0] || y2 > yBottomCorners[1]) // If either of the bottom corners is higher
- {
- yBottomCorners[0] = y1;
- yBottomCorners[1] = y2;
- }
-
-
- // Compute the centroid of the warped region
- FindQuadCentroid(x0, y0, x1, y1, x2, y2, x3, y3, csite->getVCenter().x, csite->getVCenter().y);
-
- csite->setMb(mb);
- csite++;
- }
-
- // Get origin and sizes
-
- // Bounding rectangle (int numbers) of the final mosaic computed by projecting
- // each input frame into the mosaic coordinate system.
- MosaicRect fullRect;
-
- fullRect.left = (int) floor(global_rect.lft); // min-x
- fullRect.top = (int) floor(global_rect.bot); // min-y
- fullRect.right = (int) ceil(global_rect.rgt); // max-x
- fullRect.bottom = (int) ceil(global_rect.top);// max-y
- Mwidth = (unsigned short) (fullRect.right - fullRect.left + 1);
- Mheight = (unsigned short) (fullRect.bottom - fullRect.top + 1);
-
- int xLeftMost, xRightMost;
- int yTopMost, yBottomMost;
-
- // Rounding up, so that we don't include the gray border.
- xLeftMost = max(0, max(xLeftCorners[0], xLeftCorners[1]) - fullRect.left + 1);
- xRightMost = min(Mwidth - 1, min(xRightCorners[0], xRightCorners[1]) - fullRect.left - 1);
-
- yTopMost = max(0, max(yTopCorners[0], yTopCorners[1]) - fullRect.top + 1);
- yBottomMost = min(Mheight - 1, min(yBottomCorners[0], yBottomCorners[1]) - fullRect.top - 1);
-
- if (xRightMost <= xLeftMost || yBottomMost <= yTopMost)
- {
- LOGE("RunBlend: aborting -consistency check failed,"
- "(xLeftMost, xRightMost, yTopMost, yBottomMost): (%d, %d, %d, %d)",
- xLeftMost, xRightMost, yTopMost, yBottomMost);
- return BLEND_RET_ERROR;
- }
-
- // Make sure image width is multiple of 4
- Mwidth = (unsigned short) ((Mwidth + 3) & ~3);
- Mheight = (unsigned short) ((Mheight + 3) & ~3); // Round up.
-
- ret = MosaicSizeCheck(LIMIT_SIZE_MULTIPLIER, LIMIT_HEIGHT_MULTIPLIER);
- if (ret != BLEND_RET_OK)
- {
- LOGE("RunBlend: aborting - mosaic size check failed, "
- "(frame_width, frame_height) vs (mosaic_width, mosaic_height): "
- "(%d, %d) vs (%d, %d)", width, height, Mwidth, Mheight);
- return ret;
- }
-
- LOGI("Allocate mosaic image for blending - size: %d x %d", Mwidth, Mheight);
- YUVinfo *imgMos = YUVinfo::allocateImage(Mwidth, Mheight);
- if (imgMos == NULL)
- {
- LOGE("RunBlend: aborting - couldn't alloc %d x %d mosaic image", Mwidth, Mheight);
- return BLEND_RET_ERROR_MEMORY;
- }
-
- // Set the Y image to 255 so we can distinguish when frame idx are written to it
- memset(imgMos->Y.ptr[0], 255, (imgMos->Y.width * imgMos->Y.height));
- // Set the v and u images to black
- memset(imgMos->V.ptr[0], 128, (imgMos->V.width * imgMos->V.height) << 1);
-
- // Do the triangulation. It returns a sorted list of edges
- SEdgeVector *edge;
- int n = m_Triangulator.triangulate(&edge, numCenters, width, height);
- m_Triangulator.linkNeighbors(edge, n, numCenters);
-
- // Bounding rectangle that determines the positioning of the rectangle that is
- // cropped out of the computed mosaic to get rid of the gray borders.
- MosaicRect cropping_rect;
-
- if (m_wb.horizontal)
- {
- cropping_rect.left = xLeftMost;
- cropping_rect.right = xRightMost;
- }
- else
- {
- cropping_rect.top = yTopMost;
- cropping_rect.bottom = yBottomMost;
- }
-
- // Do merging and blending :
- ret = DoMergeAndBlend(frames, numCenters, width, height, *imgMos, fullRect,
- cropping_rect, progress, cancelComputation);
-
- if (m_wb.blendingType == BLEND_TYPE_HORZ)
- CropFinalMosaic(*imgMos, cropping_rect);
-
-
- m_Triangulator.freeMemory(); // note: can be called even if delaunay_alloc() wasn't successful
-
- imageMosaicYVU = imgMos->Y.ptr[0];
-
-
- if (m_wb.blendingType == BLEND_TYPE_HORZ)
- {
- mosaicWidth = cropping_rect.right - cropping_rect.left + 1;
- mosaicHeight = cropping_rect.bottom - cropping_rect.top + 1;
- }
- else
- {
- mosaicWidth = Mwidth;
- mosaicHeight = Mheight;
- }
-
- return ret;
-}
-
-int Blend::MosaicSizeCheck(float sizeMultiplier, float heightMultiplier) {
- if (Mwidth < width || Mheight < height) {
- return BLEND_RET_ERROR;
- }
-
- if ((Mwidth * Mheight) > (width * height * sizeMultiplier)) {
- return BLEND_RET_ERROR;
- }
-
- // We won't do blending for the cases where users swing the device too much
- // in the secondary direction. We use a short side to determine the
- // secondary direction because users may hold the device in landscape
- // or portrait.
- int shortSide = min(Mwidth, Mheight);
- if (shortSide > height * heightMultiplier) {
- return BLEND_RET_ERROR;
- }
-
- return BLEND_RET_OK;
-}
-
-int Blend::FillFramePyramid(MosaicFrame *mb)
-{
- ImageType mbY, mbU, mbV;
- // Lay this image, centered into the temporary buffer
- mbY = mb->image;
- mbU = mb->getU();
- mbV = mb->getV();
-
- int h, w;
-
- for(h=0; h<height; h++)
- {
- ImageTypeShort yptr = m_pFrameYPyr->ptr[h];
- ImageTypeShort uptr = m_pFrameUPyr->ptr[h];
- ImageTypeShort vptr = m_pFrameVPyr->ptr[h];
-
- for(w=0; w<width; w++)
- {
- yptr[w] = (short) ((*(mbY++)) << 3);
- uptr[w] = (short) ((*(mbU++)) << 3);
- vptr[w] = (short) ((*(mbV++)) << 3);
- }
- }
-
- // Spread the image through the border
- PyramidShort::BorderSpread(m_pFrameYPyr, BORDER, BORDER, BORDER, BORDER);
- PyramidShort::BorderSpread(m_pFrameUPyr, BORDER, BORDER, BORDER, BORDER);
- PyramidShort::BorderSpread(m_pFrameVPyr, BORDER, BORDER, BORDER, BORDER);
-
- // Generate Laplacian pyramids
- if (!PyramidShort::BorderReduce(m_pFrameYPyr, m_wb.nlevs) || !PyramidShort::BorderExpand(m_pFrameYPyr, m_wb.nlevs, -1) ||
- !PyramidShort::BorderReduce(m_pFrameUPyr, m_wb.nlevsC) || !PyramidShort::BorderExpand(m_pFrameUPyr, m_wb.nlevsC, -1) ||
- !PyramidShort::BorderReduce(m_pFrameVPyr, m_wb.nlevsC) || !PyramidShort::BorderExpand(m_pFrameVPyr, m_wb.nlevsC, -1))
- {
- LOGE("Error: Could not generate Laplacian pyramids");
- return BLEND_RET_ERROR;
- }
- else
- {
- return BLEND_RET_OK;
- }
-}
-
-int Blend::DoMergeAndBlend(MosaicFrame **frames, int nsite,
- int width, int height, YUVinfo &imgMos, MosaicRect &rect,
- MosaicRect &cropping_rect, float &progress, bool &cancelComputation)
-{
- m_pMosaicYPyr = NULL;
- m_pMosaicUPyr = NULL;
- m_pMosaicVPyr = NULL;
-
- m_pMosaicYPyr = PyramidShort::allocatePyramidPacked(m_wb.nlevs,(unsigned short)rect.Width(),(unsigned short)rect.Height(),BORDER);
- m_pMosaicUPyr = PyramidShort::allocatePyramidPacked(m_wb.nlevsC,(unsigned short)rect.Width(),(unsigned short)rect.Height(),BORDER);
- m_pMosaicVPyr = PyramidShort::allocatePyramidPacked(m_wb.nlevsC,(unsigned short)rect.Width(),(unsigned short)rect.Height(),BORDER);
- if (!m_pMosaicYPyr || !m_pMosaicUPyr || !m_pMosaicVPyr)
- {
- LOGE("Error: Could not allocate pyramids for blending");
- return BLEND_RET_ERROR_MEMORY;
- }
-
- MosaicFrame *mb;
-
- CSite *esite = m_AllSites + nsite;
- int site_idx;
-
- // First go through each frame and for each mosaic pixel determine which frame it should come from
- site_idx = 0;
- for(CSite *csite = m_AllSites; csite < esite; csite++)
- {
- if(cancelComputation)
- {
- if (m_pMosaicVPyr) free(m_pMosaicVPyr);
- if (m_pMosaicUPyr) free(m_pMosaicUPyr);
- if (m_pMosaicYPyr) free(m_pMosaicYPyr);
- return BLEND_RET_CANCELLED;
- }
-
- mb = csite->getMb();
-
- mb->vcrect = mb->brect;
- ClipBlendRect(csite, mb->vcrect);
-
- ComputeMask(csite, mb->vcrect, mb->brect, rect, imgMos, site_idx);
-
- site_idx++;
- }
-
- ////////// imgMos.Y, imgMos.V, imgMos.U are used as follows //////////////
- ////////////////////// THIN STRIP MODE ///////////////////////////////////
-
- // imgMos.Y is used to store the index of the image from which each pixel
- // in the output mosaic can be read out for the thin-strip mode. Thus,
- // there is no special handling for pixels around the seam. Also, imgMos.Y
- // is set to 255 wherever we can't get its value from any input image e.g.
- // in the gray border areas. imgMos.V and imgMos.U are set to 128 for the
- // thin-strip mode.
-
- ////////////////////// WIDE STRIP MODE ///////////////////////////////////
-
- // imgMos.Y is used the same way as the thin-strip mode.
- // imgMos.V is used to store the index of the neighboring image which
- // should contribute to the color of an output pixel in a band around
- // the seam. Thus, in this band, we will crossfade between the color values
- // from the image index imgMos.Y and image index imgMos.V. imgMos.U is
- // used to store the weight (multiplied by 100) that each image will
- // contribute to the blending process. Thus, we start at 99% contribution
- // from the first image, then go to 50% contribution from each image at
- // the seam. Then, the contribution from the second image goes up to 99%.
-
- // For WIDE mode, set the pixel masks to guide the blender to cross-fade
- // between the images on either side of each seam:
- if (m_wb.stripType == STRIP_TYPE_WIDE)
- {
- if(m_wb.horizontal)
- {
- // Set the number of pixels around the seam to cross-fade between
- // the two component images,
- int tw = STRIP_CROSS_FADE_WIDTH_PXLS;
-
- // Proceed with the image index calculation for cross-fading
- // only if the cross-fading width is larger than 0
- if (tw > 0)
- {
- for(int y = 0; y < imgMos.Y.height; y++)
- {
- // Since we compare two adjacent pixels to determine
- // whether there is a seam, the termination condition of x
- // is set to imgMos.Y.width - tw, so that x+1 below
- // won't exceed the imgMos' boundary.
- for(int x = tw; x < imgMos.Y.width - tw; )
- {
- // Determine where the seam is...
- if (imgMos.Y.ptr[y][x] != imgMos.Y.ptr[y][x+1] &&
- imgMos.Y.ptr[y][x] != 255 &&
- imgMos.Y.ptr[y][x+1] != 255)
- {
- // Find the image indices on both sides of the seam
- unsigned char idx1 = imgMos.Y.ptr[y][x];
- unsigned char idx2 = imgMos.Y.ptr[y][x+1];
-
- for (int o = tw; o >= 0; o--)
- {
- // Set the image index to use for cross-fading
- imgMos.V.ptr[y][x - o] = idx2;
- // Set the intensity weights to use for cross-fading
- imgMos.U.ptr[y][x - o] = 50 + (99 - 50) * o / tw;
- }
-
- for (int o = 1; o <= tw; o++)
- {
- // Set the image index to use for cross-fading
- imgMos.V.ptr[y][x + o] = idx1;
- // Set the intensity weights to use for cross-fading
- imgMos.U.ptr[y][x + o] = imgMos.U.ptr[y][x - o];
- }
-
- x += (tw + 1);
- }
- else
- {
- x++;
- }
- }
- }
- }
- }
- else
- {
- // Set the number of pixels around the seam to cross-fade between
- // the two component images,
- int tw = STRIP_CROSS_FADE_WIDTH_PXLS;
-
- // Proceed with the image index calculation for cross-fading
- // only if the cross-fading width is larger than 0
- if (tw > 0)
- {
- for(int x = 0; x < imgMos.Y.width; x++)
- {
- // Since we compare two adjacent pixels to determine
- // whether there is a seam, the termination condition of y
- // is set to imgMos.Y.height - tw, so that y+1 below
- // won't exceed the imgMos' boundary.
- for(int y = tw; y < imgMos.Y.height - tw; )
- {
- // Determine where the seam is...
- if (imgMos.Y.ptr[y][x] != imgMos.Y.ptr[y+1][x] &&
- imgMos.Y.ptr[y][x] != 255 &&
- imgMos.Y.ptr[y+1][x] != 255)
- {
- // Find the image indices on both sides of the seam
- unsigned char idx1 = imgMos.Y.ptr[y][x];
- unsigned char idx2 = imgMos.Y.ptr[y+1][x];
-
- for (int o = tw; o >= 0; o--)
- {
- // Set the image index to use for cross-fading
- imgMos.V.ptr[y - o][x] = idx2;
- // Set the intensity weights to use for cross-fading
- imgMos.U.ptr[y - o][x] = 50 + (99 - 50) * o / tw;
- }
-
- for (int o = 1; o <= tw; o++)
- {
- // Set the image index to use for cross-fading
- imgMos.V.ptr[y + o][x] = idx1;
- // Set the intensity weights to use for cross-fading
- imgMos.U.ptr[y + o][x] = imgMos.U.ptr[y - o][x];
- }
-
- y += (tw + 1);
- }
- else
- {
- y++;
- }
- }
- }
- }
- }
-
- }
-
- // Now perform the actual blending using the frame assignment determined above
- site_idx = 0;
- for(CSite *csite = m_AllSites; csite < esite; csite++)
- {
- if(cancelComputation)
- {
- if (m_pMosaicVPyr) free(m_pMosaicVPyr);
- if (m_pMosaicUPyr) free(m_pMosaicUPyr);
- if (m_pMosaicYPyr) free(m_pMosaicYPyr);
- return BLEND_RET_CANCELLED;
- }
-
- mb = csite->getMb();
-
-
- if(FillFramePyramid(mb)!=BLEND_RET_OK)
- return BLEND_RET_ERROR;
-
- ProcessPyramidForThisFrame(csite, mb->vcrect, mb->brect, rect, imgMos, mb->trs, site_idx);
-
- progress += TIME_PERCENT_BLEND/nsite;
-
- site_idx++;
- }
-
-
- // Blend
- PerformFinalBlending(imgMos, cropping_rect);
-
- if (cropping_rect.Width() <= 0 || cropping_rect.Height() <= 0)
- {
- LOGE("Size of the cropping_rect is invalid - (width, height): (%d, %d)",
- cropping_rect.Width(), cropping_rect.Height());
- return BLEND_RET_ERROR;
- }
-
- if (m_pMosaicVPyr) free(m_pMosaicVPyr);
- if (m_pMosaicUPyr) free(m_pMosaicUPyr);
- if (m_pMosaicYPyr) free(m_pMosaicYPyr);
-
- progress += TIME_PERCENT_FINAL;
-
- return BLEND_RET_OK;
-}
-
-void Blend::CropFinalMosaic(YUVinfo &imgMos, MosaicRect &cropping_rect)
-{
- int i, j, k;
- ImageType yimg;
- ImageType uimg;
- ImageType vimg;
-
-
- yimg = imgMos.Y.ptr[0];
- uimg = imgMos.U.ptr[0];
- vimg = imgMos.V.ptr[0];
-
- k = 0;
- for (j = cropping_rect.top; j <= cropping_rect.bottom; j++)
- {
- for (i = cropping_rect.left; i <= cropping_rect.right; i++)
- {
- yimg[k] = yimg[j*imgMos.Y.width+i];
- k++;
- }
- }
- for (j = cropping_rect.top; j <= cropping_rect.bottom; j++)
- {
- for (i = cropping_rect.left; i <= cropping_rect.right; i++)
- {
- yimg[k] = vimg[j*imgMos.Y.width+i];
- k++;
- }
- }
- for (j = cropping_rect.top; j <= cropping_rect.bottom; j++)
- {
- for (i = cropping_rect.left; i <= cropping_rect.right; i++)
- {
- yimg[k] = uimg[j*imgMos.Y.width+i];
- k++;
- }
- }
-}
-
-int Blend::PerformFinalBlending(YUVinfo &imgMos, MosaicRect &cropping_rect)
-{
- if (!PyramidShort::BorderExpand(m_pMosaicYPyr, m_wb.nlevs, 1) || !PyramidShort::BorderExpand(m_pMosaicUPyr, m_wb.nlevsC, 1) ||
- !PyramidShort::BorderExpand(m_pMosaicVPyr, m_wb.nlevsC, 1))
- {
- LOGE("Error: Could not BorderExpand!");
- return BLEND_RET_ERROR;
- }
-
- ImageTypeShort myimg;
- ImageTypeShort muimg;
- ImageTypeShort mvimg;
- ImageType yimg;
- ImageType uimg;
- ImageType vimg;
-
- int cx = (int)imgMos.Y.width/2;
- int cy = (int)imgMos.Y.height/2;
-
- // 2D boolean array that contains true wherever the mosaic image data is
- // invalid (i.e. in the gray border).
- bool **b = new bool*[imgMos.Y.height];
-
- for(int j=0; j<imgMos.Y.height; j++)
- {
- b[j] = new bool[imgMos.Y.width];
- }
-
- // Copy the resulting image into the full image using the mask
- int i, j;
-
- yimg = imgMos.Y.ptr[0];
- uimg = imgMos.U.ptr[0];
- vimg = imgMos.V.ptr[0];
-
- for (j = 0; j < imgMos.Y.height; j++)
- {
- myimg = m_pMosaicYPyr->ptr[j];
- muimg = m_pMosaicUPyr->ptr[j];
- mvimg = m_pMosaicVPyr->ptr[j];
-
- for (i = 0; i<imgMos.Y.width; i++)
- {
- // A final mask was set up previously,
- // if the value is zero skip it, otherwise replace it.
- if (*yimg <255)
- {
- short value = (short) ((*myimg) >> 3);
- if (value < 0) value = 0;
- else if (value > 255) value = 255;
- *yimg = (unsigned char) value;
-
- value = (short) ((*muimg) >> 3);
- if (value < 0) value = 0;
- else if (value > 255) value = 255;
- *uimg = (unsigned char) value;
-
- value = (short) ((*mvimg) >> 3);
- if (value < 0) value = 0;
- else if (value > 255) value = 255;
- *vimg = (unsigned char) value;
-
- b[j][i] = false;
-
- }
- else
- { // set border color in here
- *yimg = (unsigned char) 96;
- *uimg = (unsigned char) 128;
- *vimg = (unsigned char) 128;
-
- b[j][i] = true;
- }
-
- yimg++;
- uimg++;
- vimg++;
- myimg++;
- muimg++;
- mvimg++;
- }
- }
-
- if(m_wb.horizontal)
- {
- //Scan through each row and increment top if the row contains any gray
- for (j = 0; j < imgMos.Y.height; j++)
- {
- for (i = cropping_rect.left; i < cropping_rect.right; i++)
- {
- if (b[j][i])
- {
- break; // to next row
- }
- }
-
- if (i == cropping_rect.right) //no gray pixel in this row!
- {
- cropping_rect.top = j;
- break;
- }
- }
-
- //Scan through each row and decrement bottom if the row contains any gray
- for (j = imgMos.Y.height-1; j >= 0; j--)
- {
- for (i = cropping_rect.left; i < cropping_rect.right; i++)
- {
- if (b[j][i])
- {
- break; // to next row
- }
- }
-
- if (i == cropping_rect.right) //no gray pixel in this row!
- {
- cropping_rect.bottom = j;
- break;
- }
- }
- }
- else // Vertical Mosaic
- {
- //Scan through each column and increment left if the column contains any gray
- for (i = 0; i < imgMos.Y.width; i++)
- {
- for (j = cropping_rect.top; j < cropping_rect.bottom; j++)
- {
- if (b[j][i])
- {
- break; // to next column
- }
- }
-
- if (j == cropping_rect.bottom) //no gray pixel in this column!
- {
- cropping_rect.left = i;
- break;
- }
- }
-
- //Scan through each column and decrement right if the column contains any gray
- for (i = imgMos.Y.width-1; i >= 0; i--)
- {
- for (j = cropping_rect.top; j < cropping_rect.bottom; j++)
- {
- if (b[j][i])
- {
- break; // to next column
- }
- }
-
- if (j == cropping_rect.bottom) //no gray pixel in this column!
- {
- cropping_rect.right = i;
- break;
- }
- }
-
- }
-
- RoundingCroppingSizeToMultipleOf8(cropping_rect);
-
- for(int j=0; j<imgMos.Y.height; j++)
- {
- delete b[j];
- }
-
- delete b;
-
- return BLEND_RET_OK;
-}
-
-void Blend::RoundingCroppingSizeToMultipleOf8(MosaicRect &rect) {
- int height = rect.bottom - rect.top + 1;
- int residue = height & 7;
- rect.bottom -= residue;
-
- int width = rect.right - rect.left + 1;
- residue = width & 7;
- rect.right -= residue;
-}
-
-void Blend::ComputeMask(CSite *csite, BlendRect &vcrect, BlendRect &brect, MosaicRect &rect, YUVinfo &imgMos, int site_idx)
-{
- PyramidShort *dptr = m_pMosaicYPyr;
-
- int nC = m_wb.nlevsC;
- int l = (int) ((vcrect.lft - rect.left));
- int b = (int) ((vcrect.bot - rect.top));
- int r = (int) ((vcrect.rgt - rect.left));
- int t = (int) ((vcrect.top - rect.top));
-
- if (vcrect.lft == brect.lft)
- l = (l <= 0) ? -BORDER : l - BORDER;
- else if (l < -BORDER)
- l = -BORDER;
-
- if (vcrect.bot == brect.bot)
- b = (b <= 0) ? -BORDER : b - BORDER;
- else if (b < -BORDER)
- b = -BORDER;
-
- if (vcrect.rgt == brect.rgt)
- r = (r >= dptr->width) ? dptr->width + BORDER - 1 : r + BORDER;
- else if (r >= dptr->width + BORDER)
- r = dptr->width + BORDER - 1;
-
- if (vcrect.top == brect.top)
- t = (t >= dptr->height) ? dptr->height + BORDER - 1 : t + BORDER;
- else if (t >= dptr->height + BORDER)
- t = dptr->height + BORDER - 1;
-
- // Walk the Region of interest and populate the pyramid
- for (int j = b; j <= t; j++)
- {
- int jj = j;
- double sj = jj + rect.top;
-
- for (int i = l; i <= r; i++)
- {
- int ii = i;
- // project point and then triangulate to neighbors
- double si = ii + rect.left;
-
- double dself = hypotSq(csite->getVCenter().x - si, csite->getVCenter().y - sj);
- int inMask = ((unsigned) ii < imgMos.Y.width &&
- (unsigned) jj < imgMos.Y.height) ? 1 : 0;
-
- if(!inMask)
- continue;
-
- // scan the neighbors to see if this is a valid position
- unsigned char mask = (unsigned char) 255;
- SEdgeVector *ce;
- int ecnt;
- for (ce = csite->getNeighbor(), ecnt = csite->getNumNeighbors(); ecnt--; ce++)
- {
- double d1 = hypotSq(m_AllSites[ce->second].getVCenter().x - si,
- m_AllSites[ce->second].getVCenter().y - sj);
- if (d1 < dself)
- {
- break;
- }
- }
-
- if (ecnt >= 0) continue;
-
- imgMos.Y.ptr[jj][ii] = (unsigned char)site_idx;
- }
- }
-}
-
-void Blend::ProcessPyramidForThisFrame(CSite *csite, BlendRect &vcrect, BlendRect &brect, MosaicRect &rect, YUVinfo &imgMos, double trs[3][3], int site_idx)
-{
- // Put the Region of interest (for all levels) into m_pMosaicYPyr
- double inv_trs[3][3];
- inv33d(trs, inv_trs);
-
- // Process each pyramid level
- PyramidShort *sptr = m_pFrameYPyr;
- PyramidShort *suptr = m_pFrameUPyr;
- PyramidShort *svptr = m_pFrameVPyr;
-
- PyramidShort *dptr = m_pMosaicYPyr;
- PyramidShort *duptr = m_pMosaicUPyr;
- PyramidShort *dvptr = m_pMosaicVPyr;
-
- int dscale = 0; // distance scale for the current level
- int nC = m_wb.nlevsC;
- for (int n = m_wb.nlevs; n--; dscale++, dptr++, sptr++, dvptr++, duptr++, svptr++, suptr++, nC--)
- {
- int l = (int) ((vcrect.lft - rect.left) / (1 << dscale));
- int b = (int) ((vcrect.bot - rect.top) / (1 << dscale));
- int r = (int) ((vcrect.rgt - rect.left) / (1 << dscale) + .5);
- int t = (int) ((vcrect.top - rect.top) / (1 << dscale) + .5);
-
- if (vcrect.lft == brect.lft)
- l = (l <= 0) ? -BORDER : l - BORDER;
- else if (l < -BORDER)
- l = -BORDER;
-
- if (vcrect.bot == brect.bot)
- b = (b <= 0) ? -BORDER : b - BORDER;
- else if (b < -BORDER)
- b = -BORDER;
-
- if (vcrect.rgt == brect.rgt)
- r = (r >= dptr->width) ? dptr->width + BORDER - 1 : r + BORDER;
- else if (r >= dptr->width + BORDER)
- r = dptr->width + BORDER - 1;
-
- if (vcrect.top == brect.top)
- t = (t >= dptr->height) ? dptr->height + BORDER - 1 : t + BORDER;
- else if (t >= dptr->height + BORDER)
- t = dptr->height + BORDER - 1;
-
- // Walk the Region of interest and populate the pyramid
- for (int j = b; j <= t; j++)
- {
- int jj = (j << dscale);
- double sj = jj + rect.top;
-
- for (int i = l; i <= r; i++)
- {
- int ii = (i << dscale);
- // project point and then triangulate to neighbors
- double si = ii + rect.left;
-
- int inMask = ((unsigned) ii < imgMos.Y.width &&
- (unsigned) jj < imgMos.Y.height) ? 1 : 0;
-
- if(inMask && imgMos.Y.ptr[jj][ii] != site_idx &&
- imgMos.V.ptr[jj][ii] != site_idx &&
- imgMos.Y.ptr[jj][ii] != 255)
- continue;
-
- // Setup weights for cross-fading
- // Weight of the intensity already in the output pixel
- double wt0 = 0.0;
- // Weight of the intensity from the input pixel (current frame)
- double wt1 = 1.0;
-
- if (m_wb.stripType == STRIP_TYPE_WIDE)
- {
- if(inMask && imgMos.Y.ptr[jj][ii] != 255)
- {
- // If not on a seam OR pyramid level exceeds
- // maximum level for cross-fading.
- if((imgMos.V.ptr[jj][ii] == 128) ||
- (dscale > STRIP_CROSS_FADE_MAX_PYR_LEVEL))
- {
- wt0 = 0.0;
- wt1 = 1.0;
- }
- else
- {
- wt0 = 1.0;
- wt1 = ((imgMos.Y.ptr[jj][ii] == site_idx) ?
- (double)imgMos.U.ptr[jj][ii] / 100.0 :
- 1.0 - (double)imgMos.U.ptr[jj][ii] / 100.0);
- }
- }
- }
-
- // Project this mosaic point into the original frame coordinate space
- double xx, yy;
-
- MosaicToFrame(inv_trs, si, sj, xx, yy);
-
- if (xx < 0.0 || yy < 0.0 || xx > width - 1.0 || yy > height - 1.0)
- {
- if(inMask)
- {
- imgMos.Y.ptr[jj][ii] = 255;
- wt0 = 0.0f;
- wt1 = 1.0f;
- }
- }
-
- xx /= (1 << dscale);
- yy /= (1 << dscale);
-
-
- int x1 = (xx >= 0.0) ? (int) xx : (int) floor(xx);
- int y1 = (yy >= 0.0) ? (int) yy : (int) floor(yy);
-
- // Final destination in extended pyramid
-#ifndef LINEAR_INTERP
- if(inSegment(x1, sptr->width, BORDER-1) &&
- inSegment(y1, sptr->height, BORDER-1))
- {
- double xfrac = xx - x1;
- double yfrac = yy - y1;
- dptr->ptr[j][i] = (short) (wt0 * dptr->ptr[j][i] + .5 +
- wt1 * ciCalc(sptr, x1, y1, xfrac, yfrac));
- if (dvptr >= m_pMosaicVPyr && nC > 0)
- {
- duptr->ptr[j][i] = (short) (wt0 * duptr->ptr[j][i] + .5 +
- wt1 * ciCalc(suptr, x1, y1, xfrac, yfrac));
- dvptr->ptr[j][i] = (short) (wt0 * dvptr->ptr[j][i] + .5 +
- wt1 * ciCalc(svptr, x1, y1, xfrac, yfrac));
- }
- }
-#else
- if(inSegment(x1, sptr->width, BORDER) && inSegment(y1, sptr->height, BORDER))
- {
- int x2 = x1 + 1;
- int y2 = y1 + 1;
- double xfrac = xx - x1;
- double yfrac = yy - y1;
- double y1val = sptr->ptr[y1][x1] +
- (sptr->ptr[y1][x2] - sptr->ptr[y1][x1]) * xfrac;
- double y2val = sptr->ptr[y2][x1] +
- (sptr->ptr[y2][x2] - sptr->ptr[y2][x1]) * xfrac;
- dptr->ptr[j][i] = (short) (y1val + yfrac * (y2val - y1val));
-
- if (dvptr >= m_pMosaicVPyr && nC > 0)
- {
- y1val = suptr->ptr[y1][x1] +
- (suptr->ptr[y1][x2] - suptr->ptr[y1][x1]) * xfrac;
- y2val = suptr->ptr[y2][x1] +
- (suptr->ptr[y2][x2] - suptr->ptr[y2][x1]) * xfrac;
-
- duptr->ptr[j][i] = (short) (y1val + yfrac * (y2val - y1val));
-
- y1val = svptr->ptr[y1][x1] +
- (svptr->ptr[y1][x2] - svptr->ptr[y1][x1]) * xfrac;
- y2val = svptr->ptr[y2][x1] +
- (svptr->ptr[y2][x2] - svptr->ptr[y2][x1]) * xfrac;
-
- dvptr->ptr[j][i] = (short) (y1val + yfrac * (y2val - y1val));
- }
- }
-#endif
- else
- {
- clipToSegment(x1, sptr->width, BORDER);
- clipToSegment(y1, sptr->height, BORDER);
-
- dptr->ptr[j][i] = (short) (wt0 * dptr->ptr[j][i] + 0.5 +
- wt1 * sptr->ptr[y1][x1] );
- if (dvptr >= m_pMosaicVPyr && nC > 0)
- {
- dvptr->ptr[j][i] = (short) (wt0 * dvptr->ptr[j][i] +
- 0.5 + wt1 * svptr->ptr[y1][x1] );
- duptr->ptr[j][i] = (short) (wt0 * duptr->ptr[j][i] +
- 0.5 + wt1 * suptr->ptr[y1][x1] );
- }
- }
- }
- }
- }
-}
-
-void Blend::MosaicToFrame(double trs[3][3], double x, double y, double &wx, double &wy)
-{
- double X, Y, z;
- if (m_wb.theta == 0.0)
- {
- X = x;
- Y = y;
- }
- else if (m_wb.horizontal)
- {
- double alpha = x * m_wb.direction / m_wb.width;
- double length = (y - alpha * m_wb.correction) * m_wb.direction + m_wb.radius;
- double deltaTheta = m_wb.theta * alpha;
- double sinTheta = sin(deltaTheta);
- double cosTheta = sqrt(1.0 - sinTheta * sinTheta) * m_wb.direction;
- X = length * sinTheta + m_wb.x;
- Y = length * cosTheta + m_wb.y;
- }
- else
- {
- double alpha = y * m_wb.direction / m_wb.width;
- double length = (x - alpha * m_wb.correction) * m_wb.direction + m_wb.radius;
- double deltaTheta = m_wb.theta * alpha;
- double sinTheta = sin(deltaTheta);
- double cosTheta = sqrt(1.0 - sinTheta * sinTheta) * m_wb.direction;
- Y = length * sinTheta + m_wb.y;
- X = length * cosTheta + m_wb.x;
- }
- z = ProjZ(trs, X, Y, 1.0);
- wx = ProjX(trs, X, Y, z, 1.0);
- wy = ProjY(trs, X, Y, z, 1.0);
-}
-
-void Blend::FrameToMosaic(double trs[3][3], double x, double y, double &wx, double &wy)
-{
- // Project into the intermediate Mosaic coordinate system
- double z = ProjZ(trs, x, y, 1.0);
- double X = ProjX(trs, x, y, z, 1.0);
- double Y = ProjY(trs, x, y, z, 1.0);
-
- if (m_wb.theta == 0.0)
- {
- // No rotation, then this is all we need to do.
- wx = X;
- wy = Y;
- }
- else if (m_wb.horizontal)
- {
- double deltaX = X - m_wb.x;
- double deltaY = Y - m_wb.y;
- double length = sqrt(deltaX * deltaX + deltaY * deltaY);
- double deltaTheta = asin(deltaX / length);
- double alpha = deltaTheta / m_wb.theta;
- wx = alpha * m_wb.width * m_wb.direction;
- wy = (length - m_wb.radius) * m_wb.direction + alpha * m_wb.correction;
- }
- else
- {
- double deltaX = X - m_wb.x;
- double deltaY = Y - m_wb.y;
- double length = sqrt(deltaX * deltaX + deltaY * deltaY);
- double deltaTheta = asin(deltaY / length);
- double alpha = deltaTheta / m_wb.theta;
- wy = alpha * m_wb.width * m_wb.direction;
- wx = (length - m_wb.radius) * m_wb.direction + alpha * m_wb.correction;
- }
-}
-
-
-
-// Clip the region of interest as small as possible by using the Voronoi edges of
-// the neighbors
-void Blend::ClipBlendRect(CSite *csite, BlendRect &brect)
-{
- SEdgeVector *ce;
- int ecnt;
- for (ce = csite->getNeighbor(), ecnt = csite->getNumNeighbors(); ecnt--; ce++)
- {
- // calculate the Voronoi bisector intersection
- const double epsilon = 1e-5;
- double dx = (m_AllSites[ce->second].getVCenter().x - m_AllSites[ce->first].getVCenter().x);
- double dy = (m_AllSites[ce->second].getVCenter().y - m_AllSites[ce->first].getVCenter().y);
- double xmid = m_AllSites[ce->first].getVCenter().x + dx/2.0;
- double ymid = m_AllSites[ce->first].getVCenter().y + dy/2.0;
- double inter;
-
- if (dx > epsilon)
- {
- // neighbor is on right
- if ((inter = m_wb.roundoffOverlap + xmid - dy * (((dy >= 0.0) ? brect.bot : brect.top) - ymid) / dx) < brect.rgt)
- brect.rgt = inter;
- }
- else if (dx < -epsilon)
- {
- // neighbor is on left
- if ((inter = -m_wb.roundoffOverlap + xmid - dy * (((dy >= 0.0) ? brect.bot : brect.top) - ymid) / dx) > brect.lft)
- brect.lft = inter;
- }
- if (dy > epsilon)
- {
- // neighbor is above
- if ((inter = m_wb.roundoffOverlap + ymid - dx * (((dx >= 0.0) ? brect.lft : brect.rgt) - xmid) / dy) < brect.top)
- brect.top = inter;
- }
- else if (dy < -epsilon)
- {
- // neighbor is below
- if ((inter = -m_wb.roundoffOverlap + ymid - dx * (((dx >= 0.0) ? brect.lft : brect.rgt) - xmid) / dy) > brect.bot)
- brect.bot = inter;
- }
- }
-}
-
-void Blend::FrameToMosaicRect(int width, int height, double trs[3][3], BlendRect &brect)
-{
- // We need to walk the perimeter since the borders can be bent.
- brect.lft = brect.bot = 2e30;
- brect.rgt = brect.top = -2e30;
- double xpos, ypos;
- double lasty = height - 1.0;
- double lastx = width - 1.0;
- int i;
-
- for (i = width; i--;)
- {
-
- FrameToMosaic(trs, (double) i, 0.0, xpos, ypos);
- ClipRect(xpos, ypos, brect);
- FrameToMosaic(trs, (double) i, lasty, xpos, ypos);
- ClipRect(xpos, ypos, brect);
- }
- for (i = height; i--;)
- {
- FrameToMosaic(trs, 0.0, (double) i, xpos, ypos);
- ClipRect(xpos, ypos, brect);
- FrameToMosaic(trs, lastx, (double) i, xpos, ypos);
- ClipRect(xpos, ypos, brect);
- }
-}
-
-void Blend::SelectRelevantFrames(MosaicFrame **frames, int frames_size,
- MosaicFrame **relevant_frames, int &relevant_frames_size)
-{
- MosaicFrame *first = frames[0];
- MosaicFrame *last = frames[frames_size-1];
- MosaicFrame *mb;
-
- double fxpos = first->trs[0][2], fypos = first->trs[1][2];
-
- double midX = last->width / 2.0;
- double midY = last->height / 2.0;
- double z = ProjZ(first->trs, midX, midY, 1.0);
- double firstX, firstY;
- double prevX = firstX = ProjX(first->trs, midX, midY, z, 1.0);
- double prevY = firstY = ProjY(first->trs, midX, midY, z, 1.0);
-
- relevant_frames[0] = first; // Add first frame by default
- relevant_frames_size = 1;
-
- for (int i = 0; i < frames_size - 1; i++)
- {
- mb = frames[i];
- double currX, currY;
- z = ProjZ(mb->trs, midX, midY, 1.0);
- currX = ProjX(mb->trs, midX, midY, z, 1.0);
- currY = ProjY(mb->trs, midX, midY, z, 1.0);
- double deltaX = currX - prevX;
- double deltaY = currY - prevY;
- double center2centerDist = sqrt(deltaY * deltaY + deltaX * deltaX);
-
- if (fabs(deltaX) > STRIP_SEPARATION_THRESHOLD_PXLS ||
- fabs(deltaY) > STRIP_SEPARATION_THRESHOLD_PXLS)
- {
- relevant_frames[relevant_frames_size] = mb;
- relevant_frames_size++;
-
- prevX = currX;
- prevY = currY;
- }
- }
-
- // Add last frame by default
- relevant_frames[relevant_frames_size] = last;
- relevant_frames_size++;
-}
-
-void Blend::ComputeBlendParameters(MosaicFrame **frames, int frames_size, int is360)
-{
- // For FULL and PAN modes, we do not unwarp the mosaic into a rectangular coordinate system
- // and so we set the theta to 0 and return.
- if (m_wb.blendingType != BLEND_TYPE_CYLPAN && m_wb.blendingType != BLEND_TYPE_HORZ)
- {
- m_wb.theta = 0.0;
- return;
- }
-
- MosaicFrame *first = frames[0];
- MosaicFrame *last = frames[frames_size-1];
- MosaicFrame *mb;
-
- double lxpos = last->trs[0][2], lypos = last->trs[1][2];
- double fxpos = first->trs[0][2], fypos = first->trs[1][2];
-
- // Calculate warp to produce proper stitching.
- // get x, y displacement
- double midX = last->width / 2.0;
- double midY = last->height / 2.0;
- double z = ProjZ(first->trs, midX, midY, 1.0);
- double firstX, firstY;
- double prevX = firstX = ProjX(first->trs, midX, midY, z, 1.0);
- double prevY = firstY = ProjY(first->trs, midX, midY, z, 1.0);
-
- double arcLength, lastTheta;
- m_wb.theta = lastTheta = arcLength = 0.0;
-
- // Step through all the frames to compute the total arc-length of the cone
- // swept while capturing the mosaic (in the original conical coordinate system).
- for (int i = 0; i < frames_size; i++)
- {
- mb = frames[i];
- double currX, currY;
- z = ProjZ(mb->trs, midX, midY, 1.0);
- currX = ProjX(mb->trs, midX, midY, z, 1.0);
- currY = ProjY(mb->trs, midX, midY, z, 1.0);
- double deltaX = currX - prevX;
- double deltaY = currY - prevY;
-
- // The arcLength is computed by summing the lengths of the chords
- // connecting the pairwise projected image centers of the input image frames.
- arcLength += sqrt(deltaY * deltaY + deltaX * deltaX);
-
- if (!is360)
- {
- double thisTheta = asin(mb->trs[1][0]);
- m_wb.theta += thisTheta - lastTheta;
- lastTheta = thisTheta;
- }
-
- prevX = currX;
- prevY = currY;
- }
-
- // Stretch this to end at the proper alignment i.e. the width of the
- // rectangle is determined by the arcLength computed above and the cone
- // sector angle is determined using the rotation of the last frame.
- m_wb.width = arcLength;
- if (is360) m_wb.theta = asin(last->trs[1][0]);
-
- // If there is no rotation, we're done.
- if (m_wb.theta != 0.0)
- {
- double dx = prevX - firstX;
- double dy = prevY - firstY;
-
- // If the mosaic was captured by sweeping horizontally
- if (abs(lxpos - fxpos) > abs(lypos - fypos))
- {
- m_wb.horizontal = 1;
- // Calculate radius position to make ends exactly the same Y offset
- double radiusTheta = dx / cos(3.14159 / 2.0 - m_wb.theta);
- m_wb.radius = dy + radiusTheta * cos(m_wb.theta);
- if (m_wb.radius < 0.0) m_wb.radius = -m_wb.radius;
- }
- else
- {
- m_wb.horizontal = 0;
- // Calculate radius position to make ends exactly the same Y offset
- double radiusTheta = dy / cos(3.14159 / 2.0 - m_wb.theta);
- m_wb.radius = dx + radiusTheta * cos(m_wb.theta);
- if (m_wb.radius < 0.0) m_wb.radius = -m_wb.radius;
- }
-
- // Determine major direction
- if (m_wb.horizontal)
- {
- // Horizontal strip
- // m_wb.x,y record the origin of the rectangle coordinate system.
- if (is360) m_wb.x = firstX;
- else
- {
- if (lxpos - fxpos < 0)
- {
- m_wb.x = firstX + midX;
- z = ProjZ(last->trs, 0.0, midY, 1.0);
- prevX = ProjX(last->trs, 0.0, midY, z, 1.0);
- prevY = ProjY(last->trs, 0.0, midY, z, 1.0);
- }
- else
- {
- m_wb.x = firstX - midX;
- z = ProjZ(last->trs, last->width - 1.0, midY, 1.0);
- prevX = ProjX(last->trs, last->width - 1.0, midY, z, 1.0);
- prevY = ProjY(last->trs, last->width - 1.0, midY, z, 1.0);
- }
- }
- dy = prevY - firstY;
- if (dy < 0.0) m_wb.direction = 1.0;
- else m_wb.direction = -1.0;
- m_wb.y = firstY - m_wb.radius * m_wb.direction;
- if (dy * m_wb.theta > 0.0) m_wb.width = -m_wb.width;
- }
- else
- {
- // Vertical strip
- if (is360) m_wb.y = firstY;
- else
- {
- if (lypos - fypos < 0)
- {
- m_wb.x = firstY + midY;
- z = ProjZ(last->trs, midX, 0.0, 1.0);
- prevX = ProjX(last->trs, midX, 0.0, z, 1.0);
- prevY = ProjY(last->trs, midX, 0.0, z, 1.0);
- }
- else
- {
- m_wb.x = firstX - midX;
- z = ProjZ(last->trs, midX, last->height - 1.0, 1.0);
- prevX = ProjX(last->trs, midX, last->height - 1.0, z, 1.0);
- prevY = ProjY(last->trs, midX, last->height - 1.0, z, 1.0);
- }
- }
- dx = prevX - firstX;
- if (dx < 0.0) m_wb.direction = 1.0;
- else m_wb.direction = -1.0;
- m_wb.x = firstX - m_wb.radius * m_wb.direction;
- if (dx * m_wb.theta > 0.0) m_wb.width = -m_wb.width;
- }
-
- // Calculate the correct correction factor
- double deltaX = prevX - m_wb.x;
- double deltaY = prevY - m_wb.y;
- double length = sqrt(deltaX * deltaX + deltaY * deltaY);
- double deltaTheta = (m_wb.horizontal) ? deltaX : deltaY;
- deltaTheta = asin(deltaTheta / length);
- m_wb.correction = ((m_wb.radius - length) * m_wb.direction) /
- (deltaTheta / m_wb.theta);
- }
-}
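The WIDE-strip seam handling deleted above stores, in imgMos.U, an integer percentage that ramps from 99 at the outer edge of the cross-fade band down to 50 at the seam itself, with the neighboring image supplying the remainder. A small self-contained sketch of just that weight ramp (a standalone rewrite for illustration, not the removed Blend code; it assumes tw > 0, mirroring the tw > 0 guard above):

#include <vector>

// weights[o] is the own-image share (as a percentage, like imgMos.U) at
// distance o from the seam, for o in [0, tw].
std::vector<int> crossFadeWeights(int tw) {
    std::vector<int> weights(tw + 1);
    for (int o = 0; o <= tw; ++o)
        weights[o] = 50 + (99 - 50) * o / tw;  // same formula as the deleted loop
    return weights;
}

// Example: tw = 2 (the STRIP_CROSS_FADE_WIDTH_PXLS value from Blend.h) yields
// {50, 74, 99}: a 50/50 blend at the seam rising to a 99/1 split two pixels away.
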
diff --git a/jni_mosaic/feature_mos/src/mosaic/Blend.h b/jni_mosaic/feature_mos/src/mosaic/Blend.h
deleted file mode 100644
index 2c7ee5c5f..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/Blend.h
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////
-// Blend.h
-// $Id: Blend.h,v 1.23 2011/06/24 04:22:14 mbansal Exp $
-
-#ifndef BLEND_H
-#define BLEND_H
-
-#include "MosaicTypes.h"
-#include "Pyramid.h"
-#include "Delaunay.h"
-
-#define BLEND_RANGE_DEFAULT 6
-#define BORDER 8
-
-// Percent of total mosaicing time spent on each of the following operations
-const float TIME_PERCENT_ALIGN = 20.0;
-const float TIME_PERCENT_BLEND = 75.0;
-const float TIME_PERCENT_FINAL = 5.0;
-
-// This threshold determines the minimum separation between the image centers
-// of the input image frames for them to be accepted for blending in the
-// STRIP_TYPE_WIDE mode.
-const float STRIP_SEPARATION_THRESHOLD_PXLS = 10;
-
-// This threshold determines the number of pixels on either side of the strip
-// to cross-fade using the images contributing to each seam.
-const float STRIP_CROSS_FADE_WIDTH_PXLS = 2;
-// This specifies the maximum pyramid level to which cross-fading is applied.
-// The original image resolution is Level-0, half of that size is Level-1 and
-// so on. BLEND_RANGE_DEFAULT specifies the number of pyramid levels used by
-// the blending algorithm.
-const int STRIP_CROSS_FADE_MAX_PYR_LEVEL = 2;
-
-/**
- * Class for pyramid blending a mosaic.
- */
-class Blend {
-
-public:
-
- static const int BLEND_TYPE_NONE = -1;
- static const int BLEND_TYPE_FULL = 0;
- static const int BLEND_TYPE_PAN = 1;
- static const int BLEND_TYPE_CYLPAN = 2;
- static const int BLEND_TYPE_HORZ = 3;
-
- static const int STRIP_TYPE_THIN = 0;
- static const int STRIP_TYPE_WIDE = 1;
-
- static const int BLEND_RET_ERROR = -1;
- static const int BLEND_RET_OK = 0;
- static const int BLEND_RET_ERROR_MEMORY = 1;
- static const int BLEND_RET_CANCELLED = -2;
-
- Blend();
- ~Blend();
-
- int initialize(int blendingType, int stripType, int frame_width, int frame_height);
-
- int runBlend(MosaicFrame **frames, MosaicFrame **rframes, int frames_size, ImageType &imageMosaicYVU,
- int &mosaicWidth, int &mosaicHeight, float &progress, bool &cancelComputation);
-
-protected:
-
- PyramidShort *m_pFrameYPyr;
- PyramidShort *m_pFrameUPyr;
- PyramidShort *m_pFrameVPyr;
-
- PyramidShort *m_pMosaicYPyr;
- PyramidShort *m_pMosaicUPyr;
- PyramidShort *m_pMosaicVPyr;
-
- CDelaunay m_Triangulator;
- CSite *m_AllSites;
-
- BlendParams m_wb;
-
- // Height and width of individual frames
- int width, height;
-
- // Height and width of mosaic
- unsigned short Mwidth, Mheight;
-
- // Helper functions
- void FrameToMosaic(double trs[3][3], double x, double y, double &wx, double &wy);
- void MosaicToFrame(double trs[3][3], double x, double y, double &wx, double &wy);
- void FrameToMosaicRect(int width, int height, double trs[3][3], BlendRect &brect);
- void ClipBlendRect(CSite *csite, BlendRect &brect);
- void AlignToMiddleFrame(MosaicFrame **frames, int frames_size);
-
- int DoMergeAndBlend(MosaicFrame **frames, int nsite, int width, int height, YUVinfo &imgMos, MosaicRect &rect, MosaicRect &cropping_rect, float &progress, bool &cancelComputation);
- void ComputeMask(CSite *csite, BlendRect &vcrect, BlendRect &brect, MosaicRect &rect, YUVinfo &imgMos, int site_idx);
- void ProcessPyramidForThisFrame(CSite *csite, BlendRect &vcrect, BlendRect &brect, MosaicRect &rect, YUVinfo &imgMos, double trs[3][3], int site_idx);
-
- int FillFramePyramid(MosaicFrame *mb);
-
- // TODO: need to add documentation about the parameters
- void ComputeBlendParameters(MosaicFrame **frames, int frames_size, int is360);
- void SelectRelevantFrames(MosaicFrame **frames, int frames_size,
- MosaicFrame **relevant_frames, int &relevant_frames_size);
-
- int PerformFinalBlending(YUVinfo &imgMos, MosaicRect &cropping_rect);
- void CropFinalMosaic(YUVinfo &imgMos, MosaicRect &cropping_rect);
-
-private:
- static const float LIMIT_SIZE_MULTIPLIER = 5.0f * 2.0f;
- static const float LIMIT_HEIGHT_MULTIPLIER = 2.5f;
- int MosaicSizeCheck(float sizeMultiplier, float heightMultiplier);
- void RoundingCroppingSizeToMultipleOf8(MosaicRect& rect);
-};
-
-#endif
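
For reference, a minimal sketch of how a caller could drive the Blend API declared in the header above, assuming the MosaicFrame arrays have already been filled in and aligned elsewhere (blendAlignedFrames and its parameters are hypothetical names, not part of the original sources):

    #include "Blend.h"

    // Sketch: blend an already-aligned set of frames into a single YVU mosaic.
    int blendAlignedFrames(MosaicFrame **frames, MosaicFrame **rframes, int count,
                           int frameWidth, int frameHeight)
    {
        Blend blender;
        blender.initialize(Blend::BLEND_TYPE_CYLPAN, Blend::STRIP_TYPE_THIN,
                           frameWidth, frameHeight);

        ImageType mosaicYVU = NULL;      // receives the blended YVU mosaic
        int mosaicWidth = 0, mosaicHeight = 0;
        float progress = 0.0f;
        bool cancel = false;

        int ret = blender.runBlend(frames, rframes, count, mosaicYVU,
                                   mosaicWidth, mosaicHeight, progress, cancel);
        return (ret == Blend::BLEND_RET_OK) ? 0 : -1;
    }

The real driver is Mosaic::createMosaic in Mosaic.cpp further below, which also selects the blending and strip types.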
diff --git a/jni_mosaic/feature_mos/src/mosaic/CSite.h b/jni_mosaic/feature_mos/src/mosaic/CSite.h
deleted file mode 100644
index 928c1734b..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/CSite.h
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////
-// CSite.h
-// $Id: CSite.h,v 1.3 2011/06/17 13:35:47 mbansal Exp $
-
-#ifndef TRIDEL_H
-#define TRIDEL_H
-
-#include "MosaicTypes.h"
-
-typedef struct
-{
- short first;
- short second;
-} SEdgeVector;
-
-typedef struct
-{
- double x;
- double y;
-} SVec2d;
-
-class CSite
-{
-private:
- MosaicFrame *mosaicFrame;
- SEdgeVector *neighbor;
- int numNeighbors;
- SVec2d voronoiCenter;
-
-public:
- CSite();
- ~CSite();
-
- inline MosaicFrame* getMb() { return mosaicFrame; }
- inline SEdgeVector* getNeighbor() { return neighbor; }
- inline int getNumNeighbors() { return numNeighbors; }
- inline SVec2d& getVCenter() { return voronoiCenter; }
- inline double X() { return voronoiCenter.x; }
- inline double Y() { return voronoiCenter.y; }
-
- inline void incrNumNeighbors() { numNeighbors++; }
- inline void setNumNeighbors(int num) { numNeighbors = num; }
- inline void setNeighbor(SEdgeVector *nb) { neighbor = nb; }
- inline void setMb(MosaicFrame *mb) { mosaicFrame = mb; }
-};
-
-#endif
diff --git a/jni_mosaic/feature_mos/src/mosaic/Delaunay.cpp b/jni_mosaic/feature_mos/src/mosaic/Delaunay.cpp
deleted file mode 100644
index 0ce09fc51..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/Delaunay.cpp
+++ /dev/null
@@ -1,633 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Delaunay.cpp
-// $Id: Delaunay.cpp,v 1.10 2011/06/17 13:35:48 mbansal Exp $
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <memory.h>
-#include "Delaunay.h"
-
-#define QQ 9 // Optimal value as determined by testing
-#define DM 38 // 2^(1+DM/2) element sort capability. DM=38 for >10^6 elements
-#define NYL -1
-#define valid(l) ccw(orig(basel), dest(l), dest(basel))
-
-
-CDelaunay::CDelaunay()
-{
-}
-
-CDelaunay::~CDelaunay()
-{
-}
-
-// Allocate storage, construct triangulation, compute voronoi corners
-int CDelaunay::triangulate(SEdgeVector **edges, int n_sites, int width, int height)
-{
- EdgePointer cep;
-
- deleteAllEdges();
- buildTriangulation(n_sites);
- cep = consolidateEdges();
- *edges = ev;
-
- // Note: constructList() will change ev
- return constructList(cep, width, height);
-}
-
-// builds delaunay triangulation
-void CDelaunay::buildTriangulation(int size)
-{
- int i, rows;
- EdgePointer lefte, righte;
-
- rows = (int)( 0.5 + sqrt( (double) size / log( (double) size )));
-
- // Sort the pointers by x-coordinate of site
- for ( i=0 ; i < size ; i++ ) {
- sp[i] = (SitePointer) i;
- }
-
- spsortx( sp, 0, size-1 );
- build( 0, size-1, &lefte, &righte, rows );
- oneBndryEdge = lefte;
-}
-
-// Recursive Delaunay Triangulation Procedure
-// Contains modifications for axis-switching division.
-void CDelaunay::build(int lo, int hi, EdgePointer *le, EdgePointer *re, int rows)
-{
- EdgePointer a, b, c, ldo, rdi, ldi, rdo, maxx, minx;
- int split, lowrows;
- int low, high;
- SitePointer s1, s2, s3;
- low = lo;
- high = hi;
-
- if ( low < (high-2) ) {
- // more than three elements; do recursion
- minx = sp[low];
- maxx = sp[high];
- if (rows == 1) { // time to switch axis of division
- spsorty( sp, low, high);
- rows = 65536;
- }
- lowrows = rows/2;
- split = low - 1 + (int)
- (0.5 + ((double)(high-low+1) * ((double)lowrows / (double)rows)));
- build( low, split, &ldo, &ldi, lowrows );
- build( split+1, high, &rdi, &rdo, (rows-lowrows) );
- doMerge(&ldo, ldi, rdi, &rdo);
- while (orig(ldo) != minx) {
- ldo = rprev(ldo);
- }
- while (orig(rdo) != maxx) {
- rdo = (SitePointer) lprev(rdo);
- }
- *le = ldo;
- *re = rdo;
- }
- else if (low >= (high - 1)) { // two or one points
- a = makeEdge(sp[low], sp[high]);
- *le = a;
- *re = (EdgePointer) sym(a);
- } else { // three points
- // 3 cases: triangles of 2 orientations, and 3 points on a line
- a = makeEdge((s1 = sp[low]), (s2 = sp[low+1]));
- b = makeEdge(s2, (s3 = sp[high]));
- splice((EdgePointer) sym(a), b);
- if (ccw(s1, s3, s2)) {
- c = connectLeft(b, a);
- *le = (EdgePointer) sym(c);
- *re = c;
- } else {
- *le = a;
- *re = (EdgePointer) sym(b);
- if (ccw(s1, s2, s3)) {
- // not collinear
- c = connectLeft(b, a);
- }
- }
- }
-}
-
-// Quad-edge manipulation primitives
-EdgePointer CDelaunay::makeEdge(SitePointer origin, SitePointer destination)
-{
- EdgePointer temp, ans;
- temp = allocEdge();
- ans = temp;
-
- onext(temp) = ans;
- orig(temp) = origin;
- onext(++temp) = (EdgePointer) (ans + 3);
- onext(++temp) = (EdgePointer) (ans + 2);
- orig(temp) = destination;
- onext(++temp) = (EdgePointer) (ans + 1);
-
- return(ans);
-}
-
-void CDelaunay::splice(EdgePointer a, EdgePointer b)
-{
- EdgePointer alpha, beta, temp;
- alpha = (EdgePointer) rot(onext(a));
- beta = (EdgePointer) rot(onext(b));
- temp = onext(alpha);
- onext(alpha) = onext(beta);
- onext(beta) = temp;
- temp = onext(a);
- onext(a) = onext(b);
- onext(b) = temp;
-}
-
-EdgePointer CDelaunay::connectLeft(EdgePointer a, EdgePointer b)
-{
- EdgePointer ans;
- ans = makeEdge(dest(a), orig(b));
- splice(ans, (EdgePointer) lnext(a));
- splice((EdgePointer) sym(ans), b);
- return(ans);
-}
-
-EdgePointer CDelaunay::connectRight(EdgePointer a, EdgePointer b)
-{
- EdgePointer ans;
- ans = makeEdge(dest(a), orig(b));
- splice(ans, (EdgePointer) sym(a));
- splice((EdgePointer) sym(ans), (EdgePointer) oprev(b));
- return(ans);
-}
-
-// disconnects e from the rest of the structure and destroys it
-void CDelaunay::deleteEdge(EdgePointer e)
-{
- splice(e, (EdgePointer) oprev(e));
- splice((EdgePointer) sym(e), (EdgePointer) oprev(sym(e)));
- freeEdge(e);
-}
-
-//
-// Overall storage allocation
-//
-
-// Quad-edge storage allocation
-CSite *CDelaunay::allocMemory(int n)
-{
- unsigned int size;
-
- size = ((sizeof(CSite) + sizeof(SitePointer)) * n +
- (sizeof(SitePointer) + sizeof(EdgePointer)) * 12
- ) * n;
- if (!(sa = (CSite*) malloc(size))) {
- return NULL;
- }
- sp = (SitePointer *) (sa + n);
- ev = (SEdgeVector *) (org = sp + n);
- next = (EdgePointer *) (org + 12 * n);
- ei = (struct EDGE_INFO *) (next + 12 * n);
- return sa;
-}
-
-void CDelaunay::freeMemory()
-{
- if (sa) {
- free(sa);
- sa = (CSite*)NULL;
- }
-}
-
-//
-// Edge storage management
-//
-
-void CDelaunay::deleteAllEdges()
-{
- nextEdge = 0;
- availEdge = NYL;
-}
-
-EdgePointer CDelaunay::allocEdge()
-{
- EdgePointer ans;
-
- if (availEdge == NYL) {
- ans = nextEdge, nextEdge += 4;
- } else {
- ans = availEdge, availEdge = onext(availEdge);
- }
- return(ans);
-}
-
-void CDelaunay::freeEdge(EdgePointer e)
-{
- e ^= e & 3;
- onext(e) = availEdge;
- availEdge = e;
-}
-
-EdgePointer CDelaunay::consolidateEdges()
-{
- EdgePointer e;
- int i,j;
-
- while (availEdge != NYL) {
- nextEdge -= 4; e = availEdge; availEdge = onext(availEdge);
-
- if (e==nextEdge) {
- continue; // the one deleted was the last one anyway
- }
- if ((oneBndryEdge&~3) == nextEdge) {
- oneBndryEdge = (EdgePointer) (e | (oneBndryEdge&3));
- }
- for (i=0,j=3; i<4; i++,j=rot(j)) {
- onext(e+i) = onext(nextEdge+i);
- onext(rot(onext(e+i))) = (EdgePointer) (e+j);
- }
- }
- return nextEdge;
-}
-
-//
-// Sorting Routines
-//
-
-int CDelaunay::xcmpsp(int i, int j)
-{
- double d = sa[(i>=0)?sp[i]:sp1].X() - sa[(j>=0)?sp[j]:sp1].X();
- if ( d > 0. ) {
- return 1;
- }
- if ( d < 0. ) {
- return -1;
- }
- d = sa[(i>=0)?sp[i]:sp1].Y() - sa[(j>=0)?sp[j]:sp1].Y();
- if ( d > 0. ) {
- return 1;
- }
- if ( d < 0. ) {
- return -1;
- }
- return 0;
-}
-
-int CDelaunay::ycmpsp(int i, int j)
-{
- double d = sa[(i>=0)?sp[i]:sp1].Y() - sa[(j>=0)?sp[j]:sp1].Y();
- if ( d > 0. ) {
- return 1;
- }
- if ( d < 0. ) {
- return -1;
- }
- d = sa[(i>=0)?sp[i]:sp1].X() - sa[(j>=0)?sp[j]:sp1].X();
- if ( d > 0. ) {
- return 1;
- }
- if ( d < 0. ) {
- return -1;
- }
- return 0;
-}
-
-int CDelaunay::cmpev(int i, int j)
-{
- return (ev[i].first - ev[j].first);
-}
-
-void CDelaunay::swapsp(int i, int j)
-{
- int t;
- t = (i>=0) ? sp[i] : sp1;
-
- if (i>=0) {
- sp[i] = (j>=0)?sp[j]:sp1;
- } else {
- sp1 = (j>=0)?sp[j]:sp1;
- }
-
- if (j>=0) {
- sp[j] = (SitePointer) t;
- } else {
- sp1 = (SitePointer) t;
- }
-}
-
-void CDelaunay::swapev(int i, int j)
-{
- SEdgeVector temp;
-
- temp = ev[i];
- ev[i] = ev[j];
- ev[j] = temp;
-}
-
-void CDelaunay::copysp(int i, int j)
-{
- if (j>=0) {
- sp[j] = (i>=0)?sp[i]:sp1;
- } else {
- sp1 = (i>=0)?sp[i]:sp1;
- }
-}
-
-void CDelaunay::copyev(int i, int j)
-{
- ev[j] = ev[i];
-}
-
-void CDelaunay::spsortx(SitePointer *sp_in, int low, int high)
-{
- sp = sp_in;
- rcssort(low,high,-1,&CDelaunay::xcmpsp,&CDelaunay::swapsp,&CDelaunay::copysp);
-}
-
-void CDelaunay::spsorty(SitePointer *sp_in, int low, int high )
-{
- sp = sp_in;
- rcssort(low,high,-1,&CDelaunay::ycmpsp,&CDelaunay::swapsp,&CDelaunay::copysp);
-}
-
-void CDelaunay::rcssort(int lowelt, int highelt, int temp,
- int (CDelaunay::*comparison)(int,int),
- void (CDelaunay::*swap)(int,int),
- void (CDelaunay::*copy)(int,int))
-{
- int m,sij,si,sj,sL,sk;
- int stack[DM];
-
- if (highelt-lowelt<=1) {
- return;
- }
- if (highelt-lowelt>QQ) {
- m = 0;
- si = lowelt; sj = highelt;
- for (;;) { // partition [si,sj] about median-of-3.
- sij = (sj+si) >> 1;
-
- // Now to sort elements si,sij,sj into order & set temp=their median
- if ( (this->*comparison)( si,sij ) > 0 ) {
- (this->*swap)( si,sij );
- }
- if ( (this->*comparison)( sij,sj ) > 0 ) {
- (this->*swap)( sj,sij );
- if ( (this->*comparison)( si,sij ) > 0 ) {
- (this->*swap)( si,sij );
- }
- }
- (this->*copy)( sij,temp );
-
- // Now to partition into elements <=temp, >=temp, and ==temp.
- sk = si; sL = sj;
- do {
- do {
- sL--;
- } while( (this->*comparison)( sL,temp ) > 0 );
- do {
- sk++;
- } while( (this->*comparison)( temp,sk ) > 0 );
- if ( sk < sL ) {
- (this->*swap)( sL,sk );
- }
- } while(sk <= sL);
-
- // Now to recurse on shorter partition, store longer partition on stack
- if ( sL-si > sj-sk ) {
- if ( sL-si < QQ ) {
- if( m==0 ) {
- break; // empty stack && both partitions < QQ so break
- } else {
- sj = stack[--m];
- si = stack[--m];
- }
- }
- else {
- if ( sj-sk < QQ ) {
- sj = sL;
- } else {
- stack[m++] = si;
- stack[m++] = sL;
- si = sk;
- }
- }
- }
- else {
- if ( sj-sk < QQ ) {
- if ( m==0 ) {
- break; // empty stack && both partitions < QQ so break
- } else {
- sj = stack[--m];
- si = stack[--m];
- }
- }
- else {
- if ( sL-si < QQ ) {
- si = sk;
- } else {
- stack[m++] = sk;
- stack[m++] = sj;
- sj = sL;
- }
- }
- }
- }
- }
-
- // Finish with a bounded "straight insertion" sort of [lowelt,highelt]; if it
- // were known that el[lowelt-1] = -INF, the "sk>=lowelt" test could be omitted to save time.
- for (si=lowelt; si<highelt; si++) {
- if ( (this->*comparison)( si,si+1 ) > 0 ) {
- (this->*copy)( si+1,temp );
- sj = sk = si;
- sj++;
- do {
- (this->*copy)( sk,sj );
- sj = sk;
- sk--;
- } while ( (this->*comparison)( sk,temp ) > 0 && sk>=lowelt );
- (this->*copy)( temp,sj );
- }
- }
-}
-
-//
-// Geometric primitives
-//
-
-// incircle, as in the Guibas-Stolfi paper.
-int CDelaunay::incircle(SitePointer a, SitePointer b, SitePointer c, SitePointer d)
-{
- double adx, ady, bdx, bdy, cdx, cdy, dx, dy, nad, nbd, ncd;
- dx = sa[d].X();
- dy = sa[d].Y();
- adx = sa[a].X() - dx;
- ady = sa[a].Y() - dy;
- bdx = sa[b].X() - dx;
- bdy = sa[b].Y() - dy;
- cdx = sa[c].X() - dx;
- cdy = sa[c].Y() - dy;
- nad = adx*adx+ady*ady;
- nbd = bdx*bdx+bdy*bdy;
- ncd = cdx*cdx+cdy*cdy;
- return( (0.0 < (nad * (bdx * cdy - bdy * cdx)
- + nbd * (cdx * ady - cdy * adx)
- + ncd * (adx * bdy - ady * bdx))) ? TRUE : FALSE );
-}
-
-// TRUE iff A, B, C form a counterclockwise oriented triangle
-int CDelaunay::ccw(SitePointer a, SitePointer b, SitePointer c)
-{
- int result;
-
- double ax = sa[a].X();
- double bx = sa[b].X();
- double cx = sa[c].X();
- double ay = sa[a].Y();
- double by = sa[b].Y();
- double cy = sa[c].Y();
-
- double val = (ax - cx)*(by - cy) - (bx - cx)*(ay - cy);
- if ( val > 0.0) {
- return true;
- }
-
- return false;
-}
-
-//
-// The Merge Procedure.
-//
-
-void CDelaunay::doMerge(EdgePointer *ldo, EdgePointer ldi, EdgePointer rdi, EdgePointer *rdo)
-{
- int rvalid, lvalid;
- EdgePointer basel,lcand,rcand,t;
-
- for (;;) {
- while (ccw(orig(ldi), dest(ldi), orig(rdi))) {
- ldi = (EdgePointer) lnext(ldi);
- }
- if (ccw(dest(rdi), orig(rdi), orig(ldi))) {
- rdi = (EdgePointer)rprev(rdi);
- } else {
- break;
- }
- }
-
- basel = connectLeft((EdgePointer) sym(rdi), ldi);
- lcand = rprev(basel);
- rcand = (EdgePointer) oprev(basel);
- if (orig(basel) == orig(*rdo)) {
- *rdo = basel;
- }
- if (dest(basel) == orig(*ldo)) {
- *ldo = (EdgePointer) sym(basel);
- }
-
- for (;;) {
-#if 1
- if (valid(t=onext(lcand))) {
-#else
- t = (EdgePointer)onext(lcand);
- if (valid(basel, t)) {
-#endif
- while (incircle(dest(lcand), dest(t), orig(lcand), orig(basel))) {
- deleteEdge(lcand);
- lcand = t;
- t = onext(lcand);
- }
- }
-#if 1
- if (valid(t=(EdgePointer)oprev(rcand))) {
-#else
- t = (EdgePointer)oprev(rcand);
- if (valid(basel, t)) {
-#endif
- while (incircle(dest(t), dest(rcand), orig(rcand), dest(basel))) {
- deleteEdge(rcand);
- rcand = t;
- t = (EdgePointer)oprev(rcand);
- }
- }
-
-#if 1
- lvalid = valid(lcand);
- rvalid = valid(rcand);
-#else
- lvalid = valid(basel, lcand);
- rvalid = valid(basel, rcand);
-#endif
- if ((! lvalid) && (! rvalid)) {
- return;
- }
-
- if (!lvalid ||
- (rvalid && incircle(dest(lcand), orig(lcand), orig(rcand), dest(rcand)))) {
- basel = connectLeft(rcand, (EdgePointer) sym(basel));
- rcand = (EdgePointer) lnext(sym(basel));
- } else {
- basel = (EdgePointer) sym(connectRight(lcand, basel));
- lcand = rprev(basel);
- }
- }
-}
-
-int CDelaunay::constructList(EdgePointer last, int width, int height)
-{
- int c, i;
- EdgePointer curr, src, nex;
- SEdgeVector *currv, *prevv;
-
- c = (int) ((curr = (EdgePointer) ((last & ~3))) >> 1);
-
- for (last -= 4; last >= 0; last -= 4) {
- src = orig(last);
- nex = dest(last);
- orig(--curr) = src;
- orig(--curr) = nex;
- orig(--curr) = nex;
- orig(--curr) = src;
- }
- rcssort(0, c - 1, -1, &CDelaunay::cmpev, &CDelaunay::swapev, &CDelaunay::copyev);
-
- // Throw out any edges that are too far apart
- currv = prevv = ev;
- for (i = c; i--; currv++) {
- if ((int) fabs(sa[currv->first].getVCenter().x - sa[currv->second].getVCenter().x) <= width &&
- (int) fabs(sa[currv->first].getVCenter().y - sa[currv->second].getVCenter().y) <= height) {
- *(prevv++) = *currv;
- } else {
- c--;
- }
- }
- return c;
-}
-
-// Fill in site neighbor information
-void CDelaunay::linkNeighbors(SEdgeVector *edge, int nedge, int nsite)
-{
- int i;
-
- for (i = 0; i < nsite; i++) {
- sa[i].setNeighbor(edge);
- sa[i].setNumNeighbors(0);
- for (; edge->first == i && nedge; edge++, nedge--) {
- sa[i].incrNumNeighbors();
- }
- }
-}
diff --git a/jni_mosaic/feature_mos/src/mosaic/Delaunay.h b/jni_mosaic/feature_mos/src/mosaic/Delaunay.h
deleted file mode 100644
index 7a450b5e4..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/Delaunay.h
+++ /dev/null
@@ -1,126 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Delaunay.h
-// $Id: Delaunay.h,v 1.9 2011/06/17 13:35:48 mbansal Exp $
-
-#ifndef DELAUNAY_H
-#define DELAUNAY_H
-#include <stdio.h>
-#include <math.h>
-#include "CSite.h"
-#include "EdgePointerUtil.h"
-
-#ifndef TRUE
-#define TRUE 1==1
-#define FALSE 0==1
-#endif
-
-//******************************************************************************
-// Reference for Quad-edge data structure:
-//
-// Leonidas Guibas and Jorge Stolfi, "Primitives for the manipulation of general
-// subdivisions and the computations of Voronoi diagrams",
-// ACM Transactions on Graphics 4, 74-123 (1985).
-//
-//******************************************************************************
-
-//
-// Common data structures
-//
-
-typedef short SitePointer;
-typedef short TrianglePointer;
-
-class CDelaunay
-{
-private:
- CSite *sa;
- EdgePointer oneBndryEdge;
- EdgePointer *next;
- SitePointer *org;
- struct EDGE_INFO *ei;
- SitePointer *sp;
- SEdgeVector *ev;
-
- SitePointer sp1;
- EdgePointer nextEdge;
- EdgePointer availEdge;
-
-private:
- void build(int lo, int hi, EdgePointer *le, EdgePointer *re, int rows);
- void buildTriangulation(int size);
-
- EdgePointer allocEdge();
- void freeEdge(EdgePointer e);
-
- EdgePointer makeEdge(SitePointer origin, SitePointer destination);
- void deleteEdge(EdgePointer e);
-
- void splice(EdgePointer, EdgePointer);
- EdgePointer consolidateEdges();
- void deleteAllEdges();
-
- void spsortx(SitePointer *, int, int);
- void spsorty(SitePointer *, int, int);
-
- int cmpev(int i, int j);
- int xcmpsp(int i, int j);
- int ycmpsp(int i, int j);
-
- void swapsp(int i, int j);
- void swapev(int i, int j);
-
- void copysp(int i, int j);
- void copyev(int i, int j);
-
- void rcssort(int lowelt, int highelt, int temp,
- int (CDelaunay::*comparison)(int,int),
- void (CDelaunay::*swap)(int,int),
- void (CDelaunay::*copy)(int,int));
-
- void doMerge(EdgePointer *ldo, EdgePointer ldi, EdgePointer rdi, EdgePointer *rdo);
- EdgePointer connectLeft(EdgePointer a, EdgePointer b);
- EdgePointer connectRight(EdgePointer a, EdgePointer b);
- int ccw(SitePointer a, SitePointer b, SitePointer c);
- int incircle(SitePointer a, SitePointer b, SitePointer c, SitePointer d);
- int constructList(EdgePointer e, int width, int height);
-
-public:
- CDelaunay();
- ~CDelaunay();
-
- CSite *allocMemory(int nsite);
- void freeMemory();
- int triangulate(SEdgeVector **edge, int nsite, int width, int height);
- void linkNeighbors(SEdgeVector *edge, int nedge, int nsite);
-};
-
-#define onext(a) next[a]
-#define oprev(a) rot(onext(rot(a)))
-#define lnext(a) rot(onext(rotinv(a)))
-#define lprev(a) sym(onext(a))
-#define rnext(a) rotinv(onext(rot(a)))
-#define rprev(a) onext(sym(a))
-#define dnext(a) sym(onext(sym(a)))
-#define dprev(a) rotinv(onext(rotinv(a)))
-
-#define orig(a) org[a]
-#define dest(a) orig(sym(a))
-#define left(a) orig(rotinv(a))
-#define right(a) orig(rot(a))
-
-#endif
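
A minimal sketch of how the class above is meant to be driven, based only on the public methods it declares (the point arrays and the triangulatePoints name are hypothetical; in this tree the actual caller is Blend, which uses the projected frame centers as sites and the frame dimensions as the edge-length cutoff):

    #include "Delaunay.h"

    // Sketch: triangulate nsite 2-D points and walk each site's neighbor list.
    void triangulatePoints(const double *xs, const double *ys, int nsite,
                           int width, int height)
    {
        CDelaunay delaunay;
        CSite *sites = delaunay.allocMemory(nsite);
        if (sites == NULL)
            return;

        // Fill in the site positions (triangulate() reads them via CSite::X()/Y()).
        for (int i = 0; i < nsite; i++) {
            sites[i].getVCenter().x = xs[i];
            sites[i].getVCenter().y = ys[i];
        }

        SEdgeVector *edges = NULL;
        int nedge = delaunay.triangulate(&edges, nsite, width, height);
        delaunay.linkNeighbors(edges, nedge, nsite);

        for (int i = 0; i < nsite; i++) {
            SEdgeVector *nb = sites[i].getNeighbor();
            for (int j = 0; j < sites[i].getNumNeighbors(); j++, nb++) {
                // nb->second is the index of a site adjacent to site i
            }
        }

        delaunay.freeMemory();
    }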
diff --git a/jni_mosaic/feature_mos/src/mosaic/EdgePointerUtil.h b/jni_mosaic/feature_mos/src/mosaic/EdgePointerUtil.h
deleted file mode 100644
index fad05d7ec..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/EdgePointerUtil.h
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef _EDGEPOINTERUTIL_H_
-#define _EDGEPOINTERUTIL_H_
-
-typedef short EdgePointer;
-
-inline EdgePointer sym(EdgePointer a)
-{
- return a ^ 2;
-}
-
-inline EdgePointer rot(EdgePointer a)
-{
- return (((a) + 1) & 3) | ((a) & ~3);
-}
-
-inline EdgePointer rotinv(EdgePointer a)
-{
- return (((a) + 3) & 3) | ((a) & ~3);
-}
-
-#endif //_EDGEPOINTERUTIL_H_
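
The encoding these helpers rely on: each quad-edge record occupies four consecutive slots, and the low two bits of an EdgePointer select one of the four views of the same edge (the directed edge, its dual, the reversed edge, and the reversed dual). sym flips to the reversed edge, while rot and rotinv step a quarter turn through the dual views. A small, self-contained sanity check of that arithmetic (the value 8 is an arbitrary canonical slot):

    #include <assert.h>
    #include "EdgePointerUtil.h"

    int main()
    {
        EdgePointer e = 8;                 // low two bits 00: the canonical view
        assert(sym(e) == 10);              // bit 1 flipped: the reversed edge
        assert(rot(rot(e)) == sym(e));     // two quarter turns equal a reversal
        assert(rotinv(rot(e)) == e);       // rotinv undoes rot
        assert(rot(rotinv(e)) == e);
        return 0;
    }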
diff --git a/jni_mosaic/feature_mos/src/mosaic/Geometry.h b/jni_mosaic/feature_mos/src/mosaic/Geometry.h
deleted file mode 100644
index 0efa0f4a5..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/Geometry.h
+++ /dev/null
@@ -1,156 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/////////////////////////////
-// Geometry.h
-// $Id: Geometry.h,v 1.2 2011/06/17 13:35:48 mbansal Exp $
-
-#pragma once
-#include "MosaicTypes.h"
-
-///////////////////////////////////////////////////////////////
-///////////////// BEG GLOBAL ROUTINES /////////////////////////
-///////////////////////////////////////////////////////////////
-
-
-inline double hypotSq(double a, double b)
-{
- return ((a)*(a)+(b)*(b));
-}
-
-inline void ClipRect(double x, double y, BlendRect &brect)
-{
- if (y < brect.bot) brect.bot = y;
- if (y > brect.top) brect.top = y;
- if (x < brect.lft) brect.lft = x;
- if (x > brect.rgt) brect.rgt = x;
-}
-
-inline void ClipRect(BlendRect rrect, BlendRect &brect)
-{
- if (rrect.bot < brect.bot) brect.bot = rrect.bot;
- if (rrect.top > brect.top) brect.top = rrect.top;
- if (rrect.lft < brect.lft) brect.lft = rrect.lft;
- if (rrect.rgt > brect.rgt) brect.rgt = rrect.rgt;
-}
-
-// Clip x to be within [-border,width+border-1]
-inline void clipToSegment(int &x, int width, int border)
-{
- if(x < -border)
- x = -border;
- else if(x >= width+border)
- x = width + border - 1;
-}
-
-// Return true if x is within [-border,width+border-2]
-inline bool inSegment(int x, int width, int border)
-{
- return (x >= -border && x < width + border - 1);
-}
-
-inline void FindTriangleCentroid(double x0, double y0, double x1, double y1,
- double x2, double y2,
- double &mass, double &centX, double &centY)
-{
- // Calculate the centroid of the triangle
- centX = (x0 + x1 + x2) / 3.0;
- centY = (y0 + y1 + y2) / 3.0;
-
- // Calculate 2*Area for the triangle
- if (y0 == y2)
- {
- if (x0 == x1)
- {
- mass = fabs((y1 - y0) * (x2 - x0)); // Special case 1a
- }
- else
- {
- mass = fabs((y1 - y0) * (x1 - x0)); // Special case 1b
- }
- }
- else if (x0 == x2)
- {
- if (x0 == x1)
- {
- mass = fabs((x2 - x0) * (y2 - y0)); // Special case 2a
- }
- else
- {
- mass = fabs((x1 - x0) * (y2 - y0)); // Special case 2b
- }
- }
- else if (x1 == x2)
- {
- mass = fabs((x1 - x0) * (y2 - y0)); // Special case 3
- }
- else
- {
- // Calculate line equation from x0,y0 to x2,y2
- double dx = x2 - x0;
- double dy = y2 - y0;
- // Calculate the length of the side
- double len1 = sqrt(dx * dx + dy * dy);
- double m1 = dy / dx;
- double b1 = y0 - m1 * x0;
- // Calculate the line that goes through x1,y1 and is perpendicular to
- // the other line
- double m2 = 1.0 / m1;
- double b2 = y1 - m2 * x1;
- // Calculate the intersection of the two lines
- if (fabs( m1 - m2 ) > 1.e-6)
- {
- double x = (b2 - b1) / (m1 - m2);
- // the mass is the base * height
- dx = x1 - x;
- dy = y1 - m1 * x + b1;
- mass = len1 * sqrt(dx * dx + dy * dy);
- }
- else
- {
- mass = fabs( (y1 - y0) * (x2 - x0) );
- }
- }
-}
-
-inline void FindQuadCentroid(double x0, double y0, double x1, double y1, double x2, double y2, double x3, double y3,
- double &centX, double &centY)
-
-{
- // To find the centroid:
- // 1) Divide the quadrilateral into two triangles by scribing a diagonal
- // 2) Calculate the centroid of each triangle (the intersection of the angle bisections).
- // 3) Find the centroid of the quad by weighting each triangle centroids by their area.
-
- // Calculate the corner points
- double z;
-
- // The quad is split from x0,y0 to x2,y2
- double mass1, mass2, cent1x, cent2x, cent1y, cent2y;
- FindTriangleCentroid(x0, y0, x1, y1, x2, y2, mass1, cent1x, cent1y);
- FindTriangleCentroid(x0, y0, x3, y3, x2, y2, mass2, cent2x, cent2y);
-
- // determine position of quad centroid
- z = mass2 / (mass1 + mass2);
- centX = cent1x + (cent2x - cent1x) * z;
- centY = cent1y + (cent2y - cent1y) * z;
-}
-
-///////////////////////////////////////////////////////////////
-////////////////// END GLOBAL ROUTINES ////////////////////////
-///////////////////////////////////////////////////////////////
-
-
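As a concrete check of the two centroid routines above: splitting the unit square (0,0)-(1,0)-(1,1)-(0,1) along the (0,0)-(1,1) diagonal yields two triangles of equal mass with centroids (2/3, 1/3) and (1/3, 2/3), so the weighted combination lands on (0.5, 0.5). A minimal sketch of that call (compiles against the headers in this tree):

    #include <stdio.h>
    #include "Geometry.h"

    int main()
    {
        double cx = 0.0, cy = 0.0;
        // Unit square corners in order; FindQuadCentroid splits along (x0,y0)-(x2,y2).
        FindQuadCentroid(0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, cx, cy);
        printf("centroid = (%g, %g)\n", cx, cy);   // expected: (0.5, 0.5)
        return 0;
    }
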
diff --git a/jni_mosaic/feature_mos/src/mosaic/ImageUtils.cpp b/jni_mosaic/feature_mos/src/mosaic/ImageUtils.cpp
deleted file mode 100644
index 6d0aac0c1..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/ImageUtils.cpp
+++ /dev/null
@@ -1,408 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////
-// ImageUtils.cpp
-// $Id: ImageUtils.cpp,v 1.12 2011/06/17 13:35:48 mbansal Exp $
-
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <sys/time.h>
-
-#include "ImageUtils.h"
-
-void ImageUtils::rgba2yvu(ImageType out, ImageType in, int width, int height)
-{
- int r,g,b, a;
- ImageType yimg = out;
- ImageType vimg = yimg + width*height;
- ImageType uimg = vimg + width*height;
- ImageType image = in;
-
- for (int ii = 0; ii < height; ii++) {
- for (int ij = 0; ij < width; ij++) {
- r = (*image++);
- g = (*image++);
- b = (*image++);
- a = (*image++);
-
- if (r < 0) r = 0;
- if (r > 255) r = 255;
- if (g < 0) g = 0;
- if (g > 255) g = 255;
- if (b < 0) b = 0;
- if (b > 255) b = 255;
-
- int val = (int) (REDY * r + GREENY * g + BLUEY * b) / 1000 + 16;
- if (val < 0) val = 0;
- if (val > 255) val = 255;
- *(yimg) = val;
-
- val = (int) (REDV * r - GREENV * g - BLUEV * b) / 1000 + 128;
- if (val < 0) val = 0;
- if (val > 255) val = 255;
- *(vimg) = val;
-
- val = (int) (-REDU * r - GREENU * g + BLUEU * b) / 1000 + 128;
- if (val < 0) val = 0;
- if (val > 255) val = 255;
- *(uimg) = val;
-
- yimg++;
- uimg++;
- vimg++;
- }
- }
-}
-
-
-void ImageUtils::rgb2yvu(ImageType out, ImageType in, int width, int height)
-{
- int r,g,b;
- ImageType yimg = out;
- ImageType vimg = yimg + width*height;
- ImageType uimg = vimg + width*height;
- ImageType image = in;
-
- for (int ii = 0; ii < height; ii++) {
- for (int ij = 0; ij < width; ij++) {
- r = (*image++);
- g = (*image++);
- b = (*image++);
-
- if (r < 0) r = 0;
- if (r > 255) r = 255;
- if (g < 0) g = 0;
- if (g > 255) g = 255;
- if (b < 0) b = 0;
- if (b > 255) b = 255;
-
- int val = (int) (REDY * r + GREENY * g + BLUEY * b) / 1000 + 16;
- if (val < 0) val = 0;
- if (val > 255) val = 255;
- *(yimg) = val;
-
- val = (int) (REDV * r - GREENV * g - BLUEV * b) / 1000 + 128;
- if (val < 0) val = 0;
- if (val > 255) val = 255;
- *(vimg) = val;
-
- val = (int) (-REDU * r - GREENU * g + BLUEU * b) / 1000 + 128;
- if (val < 0) val = 0;
- if (val > 255) val = 255;
- *(uimg) = val;
-
- yimg++;
- uimg++;
- vimg++;
- }
- }
-}
-
-ImageType ImageUtils::rgb2gray(ImageType in, int width, int height)
-{
- int r,g,b, nr, ng, nb, val;
- ImageType gray = NULL;
- ImageType image = in;
- ImageType out = ImageUtils::allocateImage(width, height, 1);
- ImageType outCopy = out;
-
- for (int ii = 0; ii < height; ii++) {
- for (int ij = 0; ij < width; ij++) {
- r = (*image++);
- g = (*image++);
- b = (*image++);
-
- if (r < 0) r = 0;
- if (r > 255) r = 255;
- if (g < 0) g = 0;
- if (g > 255) g = 255;
- if (b < 0) b = 0;
- if (b > 255) b = 255;
-
- (*outCopy) = ( 0.3*r + 0.59*g + 0.11*b);
-
- outCopy++;
- }
- }
-
- return out;
-}
-
-ImageType ImageUtils::rgb2gray(ImageType out, ImageType in, int width, int height)
-{
- int r,g,b, nr, ng, nb, val;
- ImageType gray = out;
- ImageType image = in;
- ImageType outCopy = out;
-
- for (int ii = 0; ii < height; ii++) {
- for (int ij = 0; ij < width; ij++) {
- r = (*image++);
- g = (*image++);
- b = (*image++);
-
- if (r < 0) r = 0;
- if (r > 255) r = 255;
- if (g < 0) g = 0;
- if (g > 255) g = 255;
- if (b < 0) b = 0;
- if (b > 255) b = 255;
-
- (*outCopy) = ( 0.3*r + 0.59*g + 0.11*b);
-
- outCopy++;
- }
- }
-
- return out;
-
-}
-
-ImageType *ImageUtils::imageTypeToRowPointers(ImageType in, int width, int height)
-{
- int i;
- int m_h = height;
- int m_w = width;
-
- ImageType *m_rows = new ImageType[m_h];
-
- for (i=0;i<m_h;i++) {
- m_rows[i] = &in[(m_w)*i];
- }
- return m_rows;
-}
-
-void ImageUtils::yvu2rgb(ImageType out, ImageType in, int width, int height)
-{
- int y,v,u, r, g, b;
- unsigned char *yimg = in;
- unsigned char *vimg = yimg + width*height;
- unsigned char *uimg = vimg + width*height;
- unsigned char *image = out;
-
- for (int i = 0; i < height; i++) {
- for (int j = 0; j < width; j++) {
-
- y = (*yimg);
- v = (*vimg);
- u = (*uimg);
-
- if (y < 0) y = 0;
- if (y > 255) y = 255;
- if (u < 0) u = 0;
- if (u > 255) u = 255;
- if (v < 0) v = 0;
- if (v > 255) v = 255;
-
- b = (int) ( 1.164*(y - 16) + 2.018*(u-128));
- g = (int) ( 1.164*(y - 16) - 0.813*(v-128) - 0.391*(u-128));
- r = (int) ( 1.164*(y - 16) + 1.596*(v-128));
-
- if (r < 0) r = 0;
- if (r > 255) r = 255;
- if (g < 0) g = 0;
- if (g > 255) g = 255;
- if (b < 0) b = 0;
- if (b > 255) b = 255;
-
- *(image++) = r;
- *(image++) = g;
- *(image++) = b;
-
- yimg++;
- uimg++;
- vimg++;
-
- }
- }
-}
-
-void ImageUtils::yvu2bgr(ImageType out, ImageType in, int width, int height)
-{
- int y,v,u, r, g, b;
- unsigned char *yimg = in;
- unsigned char *vimg = yimg + width*height;
- unsigned char *uimg = vimg + width*height;
- unsigned char *image = out;
-
- for (int i = 0; i < height; i++) {
- for (int j = 0; j < width; j++) {
-
- y = (*yimg);
- v = (*vimg);
- u = (*uimg);
-
- if (y < 0) y = 0;
- if (y > 255) y = 255;
- if (u < 0) u = 0;
- if (u > 255) u = 255;
- if (v < 0) v = 0;
- if (v > 255) v = 255;
-
- b = (int) ( 1.164*(y - 16) + 2.018*(u-128));
- g = (int) ( 1.164*(y - 16) - 0.813*(v-128) - 0.391*(u-128));
- r = (int) ( 1.164*(y - 16) + 1.596*(v-128));
-
- if (r < 0) r = 0;
- if (r > 255) r = 255;
- if (g < 0) g = 0;
- if (g > 255) g = 255;
- if (b < 0) b = 0;
- if (b > 255) b = 255;
-
- *(image++) = b;
- *(image++) = g;
- *(image++) = r;
-
- yimg++;
- uimg++;
- vimg++;
-
- }
- }
-}
-
-
-ImageType ImageUtils::readBinaryPPM(const char *filename, int &width, int &height)
-{
-
- FILE *imgin = NULL;
- int mval=0, format=0, eret;
- ImageType ret = IMAGE_TYPE_NOIMAGE;
-
- imgin = fopen(filename, "r");
- if (imgin == NULL) {
- fprintf(stderr, "Error: Filename %s not found\n", filename);
- return ret;
- }
-
- eret = fscanf(imgin, "P%d\n", &format);
- if (format != 6) {
- fprintf(stderr, "Error: readBinaryPPM only supports PPM format (P6)\n");
- return ret;
- }
-
- eret = fscanf(imgin, "%d %d\n", &width, &height);
- eret = fscanf(imgin, "%d\n", &mval);
- ret = allocateImage(width, height, IMAGE_TYPE_NUM_CHANNELS);
- eret = fread(ret, sizeof(ImageTypeBase), IMAGE_TYPE_NUM_CHANNELS*width*height, imgin);
-
- fclose(imgin);
-
- return ret;
-
-}
-
-void ImageUtils::writeBinaryPPM(ImageType image, const char *filename, int width, int height, int numChannels)
-{
- FILE *imgout = fopen(filename, "w");
-
- if (imgout == NULL) {
- fprintf(stderr, "Error: Filename %s could not be opened for writing\n", filename);
- return;
- }
-
- if (numChannels == 3) {
- fprintf(imgout, "P6\n%d %d\n255\n", width, height);
- } else if (numChannels == 1) {
- fprintf(imgout, "P5\n%d %d\n255\n", width, height);
- } else {
- fprintf(stderr, "Error: writeBinaryPPM: Unsupported number of channels\n");
- }
- fwrite(image, sizeof(ImageTypeBase), numChannels*width*height, imgout);
-
- fclose(imgout);
-
-}
-
-ImageType ImageUtils::allocateImage(int width, int height, int numChannels, short int border)
-{
- int overallocation = 256;
- return (ImageType) calloc(width*height*numChannels+overallocation, sizeof(ImageTypeBase));
-}
-
-
-void ImageUtils::freeImage(ImageType image)
-{
- free(image);
-}
-
-
-// allocation of one color image used for tmp buffers, etc.
-// format of contiguous memory block:
-// YUVInfo struct (type + BimageInfo for Y,U, and V),
-// Y row pointers
-// U row pointers
-// V row pointers
-// Y image pixels
-// U image pixels
-// V image pixels
-YUVinfo *YUVinfo::allocateImage(unsigned short width, unsigned short height)
-{
- unsigned short heightUV, widthUV;
-
- widthUV = width;
- heightUV = height;
-
- // figure out how much space to hold all pixels...
- int size = ((width * height * 3) + 8);
- unsigned char *position = 0;
-
- // VC 8 does not like calling free on yuv->Y.ptr since it is in
- // the middle of a block. So rearrange the memory layout so that after
- // calling mapYUVInfoToImage, yuv->Y.ptr points to the beginning
- // of the calloc'ed block.
- YUVinfo *yuv = (YUVinfo *) calloc(sizeof(YUVinfo), 1);
- if (yuv) {
- yuv->Y.width = yuv->Y.pitch = width;
- yuv->Y.height = height;
- yuv->Y.border = yuv->U.border = yuv->V.border = (unsigned short) 0;
- yuv->U.width = yuv->U.pitch = yuv->V.width = yuv->V.pitch = widthUV;
- yuv->U.height = yuv->V.height = heightUV;
-
- unsigned char* block = (unsigned char*) calloc(
- sizeof(unsigned char *) * (height + heightUV + heightUV) +
- sizeof(unsigned char) * size, 1);
-
- position = block;
- unsigned char **y = (unsigned char **) (block + size);
-
- /* Initialize and assign row pointers */
- yuv->Y.ptr = y;
- yuv->V.ptr = &y[height];
- yuv->U.ptr = &y[height + heightUV];
- }
- if (size)
- mapYUVInfoToImage(yuv, position);
- return yuv;
-}
-
-// wrap YUVInfo row pointers around 3 contiguous image (color component) planes.
-// position = starting pixel in image.
-void YUVinfo::mapYUVInfoToImage(YUVinfo *img, unsigned char *position)
-{
- int i;
- for (i = 0; i < img->Y.height; i++, position += img->Y.width)
- img->Y.ptr[i] = position;
- for (i = 0; i < img->V.height; i++, position += img->V.width)
- img->V.ptr[i] = position;
- for (i = 0; i < img->U.height; i++, position += img->U.width)
- img->U.ptr[i] = position;
-}
-
-
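To make the fixed-point YVU conversion above concrete: for a pure white RGB pixel (255, 255, 255), the luma is (257*255 + 504*255 + 98*255)/1000 + 16 = 235 and both chroma terms cancel to 0/1000 + 128 = 128, i.e. video-range white. A minimal sketch exercising rgb2yvu on that single pixel (the planar output holds the Y, V and U planes back to back):

    #include <stdio.h>
    #include "ImageUtils.h"

    int main()
    {
        unsigned char rgb[3] = { 255, 255, 255 };            // one white RGB pixel
        ImageType yvu = ImageUtils::allocateImage(1, 1, 3);  // room for Y, V, U planes

        ImageUtils::rgb2yvu(yvu, rgb, 1, 1);
        printf("Y=%d V=%d U=%d\n", yvu[0], yvu[1], yvu[2]);  // expected: Y=235 V=128 U=128

        ImageUtils::freeImage(yvu);
        return 0;
    }
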
diff --git a/jni_mosaic/feature_mos/src/mosaic/ImageUtils.h b/jni_mosaic/feature_mos/src/mosaic/ImageUtils.h
deleted file mode 100644
index 92965ca81..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/ImageUtils.h
+++ /dev/null
@@ -1,173 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////
-// ImageUtils.h
-// $Id: ImageUtils.h,v 1.9 2011/05/16 15:33:06 mbansal Exp $
-
-#ifndef IMAGE_UTILS_H
-#define IMAGE_UTILS_H
-
-#include <stdlib.h>
-
-/**
- * Definition of basic image types
- */
-typedef unsigned char ImageTypeBase;
-typedef ImageTypeBase *ImageType;
-
-typedef short ImageTypeShortBase;
-typedef ImageTypeShortBase *ImageTypeShort;
-
-typedef float ImageTypeFloatBase;
-typedef ImageTypeFloatBase *ImageTypeFloat;
-
-
-class ImageUtils {
-public:
-
- /**
- * Default number of channels in image.
- */
- static const int IMAGE_TYPE_NUM_CHANNELS = 3;
-
- /**
- * Definition of an empty image.
- */
- static const int IMAGE_TYPE_NOIMAGE = 0;
-
- /**
- * Convert image from RGB (interleaved) to YVU (planar)
- *
- * Arguments:
- * out: Resulting image (note must be preallocated before
- * call)
- * in: Input image
- * width: Width of input image
- * height: Height of input image
- */
- static void rgb2yvu(ImageType out, ImageType in, int width, int height);
-
- static void rgba2yvu(ImageType out, ImageType in, int width, int height);
-
- /**
- * Convert image from YVU (planar) to RGB or BGR (interleaved)
- *
- * Arguments:
- * out: Resulting image (note must be preallocated before
- * call)
- * in: Input image
- * width: Width of input image
- * height: Height of input image
- */
- static void yvu2rgb(ImageType out, ImageType in, int width, int height);
- static void yvu2bgr(ImageType out, ImageType in, int width, int height);
-
- /**
- * Convert image from RGB to grayscale
- *
- * Arguments:
- * in: Input image
- * width: Width of input image
- * height: Height of input image
- *
- * Return:
- * Pointer to resulting image (allocation is done here, free
- * must be done by caller)
- */
- static ImageType rgb2gray(ImageType in, int width, int height);
- static ImageType rgb2gray(ImageType out, ImageType in, int width, int height);
-
- /**
- * Read a binary PPM image
- */
- static ImageType readBinaryPPM(const char *filename, int &width, int &height);
-
- /**
- * Write a binary PPM image
- */
- static void writeBinaryPPM(ImageType image, const char *filename, int width, int height, int numChannels = IMAGE_TYPE_NUM_CHANNELS);
-
- /**
- * Allocate space for a standard image.
- */
- static ImageType allocateImage(int width, int height, int numChannels, short int border = 0);
-
- /**
- * Free memory of image
- */
- static void freeImage(ImageType image);
-
- static ImageType *imageTypeToRowPointers(ImageType out, int width, int height);
- /**
- * Get time.
- */
- static double getTime();
-
-protected:
-
- /**
- * Constants for YVU/RGB conversion
- */
- static const int REDY = 257;
- static const int REDV = 439;
- static const int REDU = 148;
- static const int GREENY = 504;
- static const int GREENV = 368;
- static const int GREENU = 291;
- static const int BLUEY = 98;
- static const int BLUEV = 71;
- static const int BLUEU = 439;
-
-};
-
-/**
- * Structure containing an image and other bookkeeping items.
- * Used in YUVinfo to store separate YVU image planes.
- */
-typedef struct {
- ImageType *ptr;
- unsigned short width;
- unsigned short height;
- unsigned short border;
- unsigned short pitch;
-} BimageInfo;
-
-/**
- * A YUV image container.
- */
-class YUVinfo {
-public:
- static YUVinfo *allocateImage(unsigned short width, unsigned short height);
- static void mapYUVInfoToImage(YUVinfo *img, unsigned char *position);
-
- /**
- * Y Plane
- */
- BimageInfo Y;
-
- /**
- * V (1st color) plane
- */
- BimageInfo V;
-
- /**
- * U (2nd color) plane
- */
- BimageInfo U;
-};
-
-#endif
diff --git a/jni_mosaic/feature_mos/src/mosaic/Interp.h b/jni_mosaic/feature_mos/src/mosaic/Interp.h
deleted file mode 100644
index 19c4a40cb..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/Interp.h
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////////////
-// Interp.h
-// $Id: Interp.h,v 1.2 2011/06/17 13:35:48 mbansal Exp $
-
-#ifndef INTERP_H
-#define INTERP_H
-
-#include "Pyramid.h"
-
-#define CTAPS 40
-static double ciTable[81] = {
- 1, 0.998461, 0.993938, 0.98657, 0.9765,
- 0.963867, 0.948813, 0.931477, 0.912, 0.890523,
- 0.867188, 0.842133, 0.8155, 0.78743, 0.758062,
- 0.727539, 0.696, 0.663586, 0.630437, 0.596695,
- 0.5625, 0.527992, 0.493312, 0.458602, 0.424,
- 0.389648, 0.355687, 0.322258, 0.2895, 0.257555,
- 0.226562, 0.196664, 0.168, 0.140711, 0.114937,
- 0.0908203, 0.0685, 0.0481172, 0.0298125, 0.0137266,
- 0, -0.0118828, -0.0225625, -0.0320859, -0.0405,
- -0.0478516, -0.0541875, -0.0595547, -0.064, -0.0675703,
- -0.0703125, -0.0722734, -0.0735, -0.0740391, -0.0739375,
- -0.0732422, -0.072, -0.0702578, -0.0680625, -0.0654609,
- -0.0625, -0.0592266, -0.0556875, -0.0519297, -0.048,
- -0.0439453, -0.0398125, -0.0356484, -0.0315, -0.0274141,
- -0.0234375, -0.0196172, -0.016, -0.0126328, -0.0095625,
- -0.00683594, -0.0045, -0.00260156, -0.0011875, -0.000304687, 0.0
-};
-
-inline double ciCalc(PyramidShort *img, int xi, int yi, double xfrac, double yfrac)
-{
- double tmpf[4];
-
- // Interpolate using 16 points
- ImageTypeShortBase *in = img->ptr[yi-1] + xi - 1;
- int off = (int)(xfrac * CTAPS);
-
- tmpf[0] = in[0] * ciTable[off + 40];
- tmpf[0] += in[1] * ciTable[off];
- tmpf[0] += in[2] * ciTable[40 - off];
- tmpf[0] += in[3] * ciTable[80 - off];
- in += img->pitch;
- tmpf[1] = in[0] * ciTable[off + 40];
- tmpf[1] += in[1] * ciTable[off];
- tmpf[1] += in[2] * ciTable[40 - off];
- tmpf[1] += in[3] * ciTable[80 - off];
- in += img->pitch;
- tmpf[2] = in[0] * ciTable[off + 40];
- tmpf[2] += in[1] * ciTable[off];
- tmpf[2] += in[2] * ciTable[40 - off];
- tmpf[2] += in[3] * ciTable[80 - off];
- in += img->pitch;
- tmpf[3] = in[0] * ciTable[off + 40];
- tmpf[3] += in[1] * ciTable[off];
- tmpf[3] += in[2] * ciTable[40 - off];
- tmpf[3] += in[3] * ciTable[80 - off];
-
- // this is the final interpolation
- off = (int)(yfrac * CTAPS);
- return (ciTable[off + 40] * tmpf[0] + ciTable[off] * tmpf[1] +
- ciTable[40 - off] * tmpf[2] + ciTable[80 - off] * tmpf[3]);
-}
-
-#endif
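
A useful sanity property of ciCalc above: with xfrac = yfrac = 0, off is 0 in both passes, so the only non-zero weight is ciTable[0] = 1 and the result reduces to in[1] of the row at yi, i.e. exactly the sample at (xi, yi). The surrounding 4x4 neighborhood only contributes for non-zero fractional offsets, which is what makes the kernel safe to evaluate at integer grid positions.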
diff --git a/jni_mosaic/feature_mos/src/mosaic/Log.h b/jni_mosaic/feature_mos/src/mosaic/Log.h
deleted file mode 100644
index cf6f14b18..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/Log.h
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef LOG_H_
-#define LOG_H_
-
-#include <android/log.h>
-#define LOGV(...) __android_log_print(ANDROID_LOG_SILENT, LOG_TAG, __VA_ARGS__)
-#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
-#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
-
-#endif
diff --git a/jni_mosaic/feature_mos/src/mosaic/MatrixUtils.h b/jni_mosaic/feature_mos/src/mosaic/MatrixUtils.h
deleted file mode 100644
index a0b84d813..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/MatrixUtils.h
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////
-// MatrixUtils.h
-// $Id: MatrixUtils.h,v 1.5 2011/05/16 15:33:06 mbansal Exp $
-
-
-#ifndef MATRIX_UTILS_H
-#define MATRIX_UTILS_H
-
-/* Simple class for 3x3 matrix, mainly used to convert from 9x1
- * to 3x3
- */
-class Matrix33 {
-public:
-
- /**
- * Empty constructor
- */
- Matrix33() {
- initialize();
- }
-
- /**
- * Constructor with identity initialization
- * Arguments:
- * identity: Specifies whether to initialize matrix to
- * identity or zeros
- */
- Matrix33(bool identity) {
- initialize(identity);
- }
-
- /**
- * Initialize to the identity or zero matrix
- */
- void initialize(bool identity = false) {
- mat[0][1] = mat[0][2] = mat[1][0] = mat[1][2] = mat[2][0] = mat[2][1] = 0.0;
- if (identity) {
- mat[0][0] = mat[1][1] = mat[2][2] = 1.0;
- } else {
- mat[0][0] = mat[1][1] = mat[2][2] = 0.0;
- }
- }
-
- /**
- * Convert a 9x1 matrix to a 3x3 matrix
- */
- static void convert9to33(double out[3][3], double in[9]) {
- out[0][0] = in[0];
- out[0][1] = in[1];
- out[0][2] = in[2];
-
- out[1][0] = in[3];
- out[1][1] = in[4];
- out[1][2] = in[5];
-
- out[2][0] = in[6];
- out[2][1] = in[7];
- out[2][2] = in[8];
-
- }
-
- /* Matrix data */
- double mat[3][3];
-
-};
-
-/* Simple class for 9x1 matrix, mainly used to convert from 3x3
- * to 9x1
- */
-class Matrix9 {
-public:
-
- /**
- * Empty constructor
- */
- Matrix9() {
- initialize();
- }
-
- /**
- * Constructor with identity initialization
- * Arguments:
- * identity: Specifies whether to initialize matrix to
- * identity or zeros
- */
- Matrix9(bool identity) {
- initialize(identity);
- }
-
- /**
- * Initialize to the identity or zero matrix
- */
- void initialize(bool identity = false) {
- mat[1] = mat[2] = mat[3] = mat[5] = mat[6] = mat[7] = 0.0;
- if (identity) {
- mat[0] = mat[4] = mat[8] = 1.0;
- } else {
- mat[0] = mat[4] = mat[8] = 0.0;
- }
- }
-
- /**
- * Convert a 3x3 matrix to a 9x1 matrix
- */
- static void convert33to9(double out[9], double in[3][3]) {
- out[0] = in[0][0];
- out[1] = in[0][1];
- out[2] = in[0][2];
-
- out[3] = in[1][0];
- out[4] = in[1][1];
- out[5] = in[1][2];
-
- out[6] = in[2][0];
- out[7] = in[2][1];
- out[8] = in[2][2];
-
- }
-
- /* Matrix data */
- double mat[9];
-
-};
-
-#endif
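
A minimal round-trip check of the two converters above (the values are arbitrary): convert33to9 followed by convert9to33 reproduces the original 3x3 array element for element.

    #include <assert.h>
    #include "MatrixUtils.h"

    int main()
    {
        double m33[3][3] = { { 1, 2, 3 }, { 4, 5, 6 }, { 7, 8, 9 } };
        double m9[9];
        double back[3][3];

        Matrix9::convert33to9(m9, m33);     // 3x3 -> 9x1, row major
        Matrix33::convert9to33(back, m9);   // 9x1 -> 3x3

        for (int i = 0; i < 3; i++)
            for (int j = 0; j < 3; j++)
                assert(back[i][j] == m33[i][j]);
        return 0;
    }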
diff --git a/jni_mosaic/feature_mos/src/mosaic/Mosaic.cpp b/jni_mosaic/feature_mos/src/mosaic/Mosaic.cpp
deleted file mode 100644
index 7b96fa5c5..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/Mosaic.cpp
+++ /dev/null
@@ -1,265 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////
-// Mosaic.cpp
-// S.O. # :
-// Author(s): zkira
-// $Id: Mosaic.cpp,v 1.20 2011/06/24 04:22:14 mbansal Exp $
-
-#include <stdio.h>
-#include <string.h>
-
-#include "Mosaic.h"
-#include "trsMatrix.h"
-
-#include "Log.h"
-#define LOG_TAG "MOSAIC"
-
-Mosaic::Mosaic()
-{
- initialized = false;
- imageMosaicYVU = NULL;
- frames_size = 0;
- max_frames = 200;
-}
-
-Mosaic::~Mosaic()
-{
- for (int i = 0; i < frames_size; i++)
- {
- if (frames[i])
- delete frames[i];
- }
- delete[] frames; // allocated with new[]
- delete[] rframes;
-
- for (int j = 0; j < owned_size; j++)
- ImageUtils::freeImage(owned_frames[j]); // owned frames come from ImageUtils::allocateImage (calloc)
- delete[] owned_frames;
-
- if (aligner != NULL)
- delete aligner;
- if (blender != NULL)
- delete blender;
-}
-
-int Mosaic::initialize(int blendingType, int stripType, int width, int height, int nframes, bool quarter_res, float thresh_still)
-{
- this->blendingType = blendingType;
-
- // TODO: Review this logic if enabling FULL or PAN mode
- if (blendingType == Blend::BLEND_TYPE_FULL ||
- blendingType == Blend::BLEND_TYPE_PAN)
- {
- stripType = Blend::STRIP_TYPE_THIN;
- }
-
- this->stripType = stripType;
- this->width = width;
- this->height = height;
-
-
- mosaicWidth = mosaicHeight = 0;
- imageMosaicYVU = NULL;
-
- frames = new MosaicFrame *[max_frames];
- rframes = new MosaicFrame *[max_frames];
-
- if(nframes>-1)
- {
- for(int i=0; i<nframes; i++)
- {
- frames[i] = new MosaicFrame(this->width,this->height,false); // Do not allocate memory for YUV data
- }
- }
- else
- {
- for(int i=0; i<max_frames; i++)
- {
- frames[i] = NULL;
- }
- }
-
- owned_frames = new ImageType[max_frames];
- owned_size = 0;
-
- LOGV("Initialize %d %d", width, height);
- LOGV("Frame width %d,%d", width, height);
- LOGV("Max num frames %d", max_frames);
-
- aligner = new Align();
- aligner->initialize(width, height,quarter_res,thresh_still);
-
- if (blendingType == Blend::BLEND_TYPE_FULL ||
- blendingType == Blend::BLEND_TYPE_PAN ||
- blendingType == Blend::BLEND_TYPE_CYLPAN ||
- blendingType == Blend::BLEND_TYPE_HORZ) {
- blender = new Blend();
- blender->initialize(blendingType, stripType, width, height);
- } else {
- blender = NULL;
- LOGE("Error: Unknown blending type %d",blendingType);
- return MOSAIC_RET_ERROR;
- }
-
- initialized = true;
-
- return MOSAIC_RET_OK;
-}
-
-int Mosaic::addFrameRGB(ImageType imageRGB)
-{
- ImageType imageYVU;
- // Convert to YVU24 which is used by blending
- imageYVU = ImageUtils::allocateImage(this->width, this->height, ImageUtils::IMAGE_TYPE_NUM_CHANNELS);
- ImageUtils::rgb2yvu(imageYVU, imageRGB, width, height);
-
- int existing_frames_size = frames_size;
- int ret = addFrame(imageYVU);
-
- if (frames_size > existing_frames_size)
- owned_frames[owned_size++] = imageYVU;
- else
- ImageUtils::freeImage(imageYVU);
-
- return ret;
-}
-
-int Mosaic::addFrame(ImageType imageYVU)
-{
- if(frames[frames_size]==NULL)
- frames[frames_size] = new MosaicFrame(this->width,this->height,false);
-
- MosaicFrame *frame = frames[frames_size];
-
- frame->image = imageYVU;
-
- // Add frame to aligner
- int ret = MOSAIC_RET_ERROR;
- if (aligner != NULL)
- {
- // Note aligner takes in RGB images
- int align_flag = Align::ALIGN_RET_OK;
- align_flag = aligner->addFrame(frame->image);
- aligner->getLastTRS(frame->trs);
-
- if (frames_size >= max_frames)
- {
- LOGV("WARNING: More frames than preallocated, ignoring."
- "Increase maximum number of frames (-f <max_frames>) to avoid this");
- return MOSAIC_RET_ERROR;
- }
-
- switch (align_flag)
- {
- case Align::ALIGN_RET_OK:
- frames_size++;
- ret = MOSAIC_RET_OK;
- break;
- case Align::ALIGN_RET_FEW_INLIERS:
- frames_size++;
- ret = MOSAIC_RET_FEW_INLIERS;
- break;
- case Align::ALIGN_RET_LOW_TEXTURE:
- ret = MOSAIC_RET_LOW_TEXTURE;
- break;
- case Align::ALIGN_RET_ERROR:
- ret = MOSAIC_RET_ERROR;
- break;
- default:
- break;
- }
- }
-
- return ret;
-}
-
-
-int Mosaic::createMosaic(float &progress, bool &cancelComputation)
-{
- if (frames_size <= 0)
- {
- // Haven't accepted any frame in aligner. No need to do blending.
- progress = TIME_PERCENT_ALIGN + TIME_PERCENT_BLEND
- + TIME_PERCENT_FINAL;
- return MOSAIC_RET_OK;
- }
-
- if (blendingType == Blend::BLEND_TYPE_PAN)
- {
-
- balanceRotations();
-
- }
-
- int ret = Blend::BLEND_RET_ERROR;
-
- // Blend the mosaic (alignment has already been done)
- if (blender != NULL)
- {
- ret = blender->runBlend((MosaicFrame **) frames, (MosaicFrame **) rframes,
- frames_size, imageMosaicYVU,
- mosaicWidth, mosaicHeight, progress, cancelComputation);
- }
-
- switch(ret)
- {
- case Blend::BLEND_RET_ERROR:
- case Blend::BLEND_RET_ERROR_MEMORY:
- ret = MOSAIC_RET_ERROR;
- break;
- case Blend::BLEND_RET_CANCELLED:
- ret = MOSAIC_RET_CANCELLED;
- break;
- case Blend::BLEND_RET_OK:
- ret = MOSAIC_RET_OK;
- }
- return ret;
-}
-
-ImageType Mosaic::getMosaic(int &width, int &height)
-{
- width = mosaicWidth;
- height = mosaicHeight;
-
- return imageMosaicYVU;
-}
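// Illustrative sketch (not from the original sources; the helper name is hypothetical):
// a minimal caller for the two functions above, showing how `progress` and the
// cancellation flag are shared with the code driving the blend.
#include "Mosaic.h"

static int buildAndFetchMosaic(Mosaic &mosaic, ImageType &resultYVU,
                               int &mosaicWidth, int &mosaicHeight,
                               bool &cancelFlag)
{
    float progress = 0.0f;                               // updated by createMosaic()
    int ret = mosaic.createMosaic(progress, cancelFlag); // caller may flip cancelFlag to abort
    if (ret == Mosaic::MOSAIC_RET_OK)
        resultYVU = mosaic.getMosaic(mosaicWidth, mosaicHeight);
    return ret;
}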
-
-
-
-int Mosaic::balanceRotations()
-{
- // Normalize to the mean angle of rotation
- double sineAngle = 0.0;
-
- for (int i = 0; i < frames_size; i++) sineAngle += frames[i]->trs[0][1];
- sineAngle /= frames_size;
- // Recover cosineAngle from cosineAngle*cosineAngle = 1 - sineAngle*sineAngle
- double cosineAngle = sqrt(1.0 - sineAngle*sineAngle);
- double m[3][3] = {
- { cosineAngle, -sineAngle, 0 },
- { sineAngle, cosineAngle, 0},
- { 0, 0, 1}};
- double tmp[3][3];
-
- for (int i = 0; i < frames_size; i++) {
- memcpy(tmp, frames[i]->trs, sizeof(tmp));
- mult33d(frames[i]->trs, m, tmp);
- }
-
- return MOSAIC_RET_OK;
-}
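// Illustrative sketch (standalone, not from the original sources): the balancing
// matrix built in balanceRotations() is a pure in-plane rotation, so its
// determinant should be cos^2 + sin^2 = 1; det33d() comes from trsMatrix.h.
#include <math.h>
#include "trsMatrix.h"

static double balanceMatrixDeterminant(double sineAngle)
{
    double cosineAngle = sqrt(1.0 - sineAngle * sineAngle);
    double m[3][3] = { { cosineAngle, -sineAngle,  0 },
                       { sineAngle,   cosineAngle, 0 },
                       { 0,           0,           1 } };
    return det33d(m);   // expected to be 1.0 for any |sineAngle| <= 1
}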
diff --git a/jni_mosaic/feature_mos/src/mosaic/Mosaic.h b/jni_mosaic/feature_mos/src/mosaic/Mosaic.h
deleted file mode 100644
index 9dea66422..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/Mosaic.h
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////
-// Mosaic.h
-// S.O. # :
-// Author(s): zkira
-// $Id: Mosaic.h,v 1.16 2011/06/24 04:22:14 mbansal Exp $
-
-#ifndef MOSAIC_H
-#define MOSAIC_H
-
-#include "ImageUtils.h"
-#include "AlignFeatures.h"
-#include "Blend.h"
-#include "MosaicTypes.h"
-
-/*! \mainpage Mosaic
-
- \section intro Introduction
- The Mosaic class provides a simple interface to the panoramic mosaicing algorithm. It accepts individual image frames, computes the alignment transformations between them, and then stitches and blends them into a single panoramic output image.
-
- \section usage Usage
- The class methods must be called in the order shown in the sample application built from mosaic_main.cpp in src/mosaic/. A brief outline of the flow is given below:
-
- \code
- Mosaic mosaic;
- // Define blending types to use, and the frame dimensions
- int blendingType = Blend::BLEND_TYPE_CYLPAN;
- int stripType = Blend::STRIP_TYPE_THIN;
- int width = 640;
- int height = 480;
-
- while (<image frames are available>)
- {
- // Check for initialization and if not, initialize
- if (!mosaic.isInitialized())
- {
- // Initialize mosaic processing
- mosaic.initialize(blendingType, stripType, width, height, -1, false, 5.0f);
- }
-
- // Add to list of frames
- mosaic.addFrameRGB(imageRGB);
-
- // Free image
- ImageUtils::freeImage(imageRGB);
- }
-
- // Create the mosaic; progress is updated during blending and
- // cancelComputation lets the caller abort the computation
- ret = mosaic.createMosaic(progress, cancelComputation);
-
- // Get back the result
- resultYVU = mosaic.getMosaic(mosaicWidth, mosaicHeight);
-
- printf("Got mosaic of size %d,%d\n", mosaicWidth, mosaicHeight);
-
- \endcode
-*/
-
-/*!
- * Main class that creates a mosaic by creating an aligner and blender.
- */
-class Mosaic
-{
-
-public:
-
- Mosaic();
- ~Mosaic();
-
- /*!
- * Creates the aligner and blender and initializes state.
- * \param blendingType Type of blending to perform
- * \param stripType Type of strip to use. 0: thin, 1: wide. stripType
- * is effective only when blendingType is CylPan or
- * Horz. Otherwise, it is set to thin irrespective of the input.
- * \param width Width of input images (note: all images must be same size)
- * \param height Height of input images (note: all images must be same size)
- * \param nframes Number of frames to pre-allocate; default value -1 will allocate each frame as it comes
- * \param quarter_res Whether to compute alignment at quarter the input resolution (default = false)
- * \param thresh_still Minimum translation, in pixels, that must be detected between the new frame and the last accepted frame before the new frame is added to the mosaic. For low-resolution processing on 320x180 input we set this to 5 pixels; to accept every frame, set it to 0.0 (the default).
- * \return Return code signifying success or failure.
- */
- int initialize(int blendingType, int stripType, int width, int height, int nframes = -1, bool quarter_res = false, float thresh_still = 0.0);
-
- /*!
- * Adds a YVU frame to the mosaic.
- * \param imageYVU Pointer to a YVU image.
- * \return Return code signifying success or failure.
- */
- int addFrame(ImageType imageYVU);
-
- /*!
- * Adds a RGB frame to the mosaic.
- * \param imageRGB Pointer to a RGB image.
- * \return Return code signifying success or failure.
- */
- int addFrameRGB(ImageType imageRGB);
-
- /*!
- * After adding all frames, call this function to perform the final blending.
- * \param progress Variable to set the current progress in.
- * \return Return code signifying success or failure.
- */
- int createMosaic(float &progress, bool &cancelComputation);
-
- /*!
- * Obtains the resulting mosaic and its dimensions.
- * \param width Width of the resulting mosaic (returned)
- * \param height Height of the resulting mosaic (returned)
- * \return Pointer to image.
- */
- ImageType getMosaic(int &width, int &height);
-
- /*!
- * Provides access to the internal alignment object pointer.
- * \return Pointer to the aligner object.
- */
- Align* getAligner() { return aligner; }
-
- /*!
- * Obtain initialization state.
- *
- * \return Returns true if initialized, false otherwise.
- */
- bool isInitialized() { return initialized; }
-
-
- /*!
- * Return codes for mosaic.
- */
- static const int MOSAIC_RET_OK = 1;
- static const int MOSAIC_RET_ERROR = -1;
- static const int MOSAIC_RET_CANCELLED = -2;
- static const int MOSAIC_RET_LOW_TEXTURE = -3;
- static const int MOSAIC_RET_FEW_INLIERS = 2;
-
-protected:
-
- /**
- * Size of image frames making up mosaic
- */
- int width, height;
-
- /**
- * Size of actual mosaic
- */
- int mosaicWidth, mosaicHeight;
-
- /**
- * Bounding box to crop the mosaic when the gray border is not desired.
- */
- MosaicRect mosaicCroppingRect;
-
- ImageType imageMosaicYVU;
-
- /**
- * Collection of frames that will make up mosaic.
- */
- MosaicFrame **frames;
-
- /**
- * Subset of frames that are considered as relevant.
- */
- MosaicFrame **rframes;
-
- int frames_size;
- int max_frames;
-
- /**
- * Implicitly created frames, should be freed by Mosaic.
- */
- ImageType *owned_frames;
- int owned_size;
-
- /**
- * Initialization state.
- */
- bool initialized;
-
- /**
- * Type of blending to perform.
- */
- int blendingType;
-
- /**
- * Type of strip to use. 0: thin (default), 1: wide
- */
- int stripType;
-
- /**
- * Pointer to aligner.
- */
- Align *aligner;
-
- /**
- * Pointer to blender.
- */
- Blend *blender;
-
- /**
- * Modifies TRS matrices so that rotations are balanced
- * about center of mosaic
- *
- * Side effect: TRS matrices of all mosaic frames
- * are modified
- */
- int balanceRotations();
-
-};
-
-#endif
diff --git a/jni_mosaic/feature_mos/src/mosaic/MosaicTypes.h b/jni_mosaic/feature_mos/src/mosaic/MosaicTypes.h
deleted file mode 100644
index 395ec4586..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/MosaicTypes.h
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////
-// MosaicTypes.h
-// S.O. # :
-// Author(s): zkira
-// $Id: MosaicTypes.h,v 1.15 2011/06/17 13:35:48 mbansal Exp $
-
-
-#ifndef MOSAIC_TYPES_H
-#define MOSAIC_TYPES_H
-
-#include "ImageUtils.h"
-
-/**
- * Definition of rectangle in a mosaic.
- */
-class MosaicRect
-{
- public:
- MosaicRect()
- {
- left = right = top = bottom = 0;
- }
-
- inline int Width()
- {
- return right - left;
- }
-
- inline int Height()
- {
- return bottom - top;
- }
-
- /**
- * Bounds of the rectangle
- */
- int left, right, top, bottom;
-};
-
-class BlendRect
-{
- public:
- double lft, rgt, top, bot;
-};
-
-/**
- * A frame making up the mosaic.
- * Note: Currently assumes a YVU image
- * containing separate Y,V, and U planes
- * in contiguous memory (in that order).
- */
-class MosaicFrame {
-public:
- ImageType image;
- double trs[3][3];
- int width, height;
- BlendRect brect; // This frame warped to the Mosaic coordinate system
- BlendRect vcrect; // brect clipped using the voronoi neighbors
- bool internal_allocation;
-
- MosaicFrame() { };
- MosaicFrame(int _width, int _height, bool allocate=true)
- {
- width = _width;
- height = _height;
- internal_allocation = allocate;
- if(internal_allocation)
- image = ImageUtils::allocateImage(width, height, ImageUtils::IMAGE_TYPE_NUM_CHANNELS);
- }
-
-
- ~MosaicFrame()
- {
- if(internal_allocation)
- if (image)
- ImageUtils::freeImage(image); // pair with ImageUtils::allocateImage above
- }
-
- /**
- * Get the V plane of the image.
- */
- inline ImageType getV()
- {
- return (image + (width*height));
- }
-
- /**
- * Get the U plane of the image.
- */
- inline ImageType getU()
- {
- return (image + (width*height*2));
- }
-
- /**
- * Get a pixel from the V plane of the image.
- */
- inline int getV(int y, int x)
- {
- ImageType V = image + (width*height);
- return V[y*width+x];
- }
-
- /**
- * Get a pixel from the U plane of the image.
- */
- inline int getU(int y, int x)
- {
- ImageType U = image + (width*height*2);
- return U[y*width+x];
- }
-
-};
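// Illustrative sketch (hypothetical helper, mirroring getV()/getU() above): the YVU24
// layout assumed here stores three full-resolution byte planes back to back, so a
// sample in plane p (0 = Y, 1 = V, 2 = U) lives at p*width*height + y*width + x.
static inline unsigned char yvuSample(const unsigned char *image,
                                      int width, int height,
                                      int plane, int x, int y)
{
    const int planeSize = width * height;          // each plane is full resolution
    return image[plane * planeSize + y * width + x];
}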
-
-/**
- * Structure for describing a warp.
- */
-typedef struct {
- int horizontal;
- double theta;
- double x;
- double y;
- double width;
- double radius;
- double direction;
- double correction;
- int blendRange;
- int blendRangeUV;
- int nlevs;
- int nlevsC;
- int blendingType;
- int stripType;
- // Add an overlap to prevent a gap between pictures due to roundoffs
- double roundoffOverlap;// 1.5
-
-} BlendParams;
-
-#endif
diff --git a/jni_mosaic/feature_mos/src/mosaic/Pyramid.cpp b/jni_mosaic/feature_mos/src/mosaic/Pyramid.cpp
deleted file mode 100644
index b022d73db..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/Pyramid.cpp
+++ /dev/null
@@ -1,270 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// pyramid.cpp
-
-#include <stdio.h>
-#include <string.h>
-
-#include "Pyramid.h"
-
-// We allocate the entire pyramid in one contiguous block, which makes cleanup
-// simpler than freeing many fragmented allocations. In addition, each level
-// carries a "pitch" field so that row-to-row pointer arithmetic stays simple and fast.
-PyramidShort *PyramidShort::allocatePyramidPacked(real levels,
- real width, real height, real border)
-{
- real border2 = (real) (border << 1);
- int lines, size = calcStorage(width, height, border2, levels, &lines);
-
- PyramidShort *img = (PyramidShort *) calloc(sizeof(PyramidShort) * levels
- + sizeof(short *) * lines
- + sizeof(short) * size, 1);
-
- if (img) {
- PyramidShort *curr, *last;
- ImageTypeShort *y = (ImageTypeShort *) &img[levels];
- ImageTypeShort position = (ImageTypeShort) &y[lines];
- for (last = (curr = img) + levels; curr < last; curr++) {
- curr->width = width;
- curr->height = height;
- curr->border = border;
- curr->pitch = (real) (width + border2);
- curr->ptr = y + border;
-
- // Assign row pointers
- for (int j = height + border2; j--; y++, position += curr->pitch) {
- *y = position + border;
- }
-
- width >>= 1;
- height >>= 1;
- }
- }
-
- return img;
-}
-
-// Allocate an image of type short
-PyramidShort *PyramidShort::allocateImage(real width, real height, real border)
-{
- real border2 = (real) (border << 1);
- PyramidShort *img = (PyramidShort *)
- calloc(sizeof(PyramidShort) + sizeof(short *) * (height + border2) +
- sizeof(short) * (width + border2) * (height + border2), 1);
-
- if (img) {
- short **y = (short **) &img[1];
- short *position = (short *) &y[height + border2];
- img->width = width;
- img->height = height;
- img->border = border;
- img->pitch = (real) (width + border2);
- img->ptr = y + border;
- position += border; // Move position down to origin of real image
-
- // Assign row pointers
- for (int j = height + border2; j--; y++, position += img->pitch) {
- *y = position;
- }
- }
-
- return img;
-}
-
-// Free the images
-void PyramidShort::freeImage(PyramidShort *image)
-{
- if (image != NULL)
- free(image);
-}
-
-// Calculate amount of storage needed taking into account the borders, etc.
-unsigned int PyramidShort::calcStorage(real width, real height, real border2, int levels, int *lines)
-{
- int size;
-
- *lines = size = 0;
-
- while(levels--) {
- size += (width + border2) * (height + border2);
- *lines += height + border2;
- width >>= 1;
- height >>= 1;
- }
-
- return size;
-}
-
-void PyramidShort::BorderSpread(PyramidShort *pyr, int left, int right,
- int top, int bot)
-{
- int off, off2, height, h, w;
- ImageTypeShort base;
-
- if (left || right) {
- off = pyr->border - left;
- off2 = pyr->width + off + pyr->border - right - 1;
- h = pyr->border - top;
- height = pyr->height + (h << 1);
- base = pyr->ptr[-h] - off;
-
- // spread in X
- for (h = height; h--; base += pyr->pitch) {
- for (w = left; w--;)
- base[-1 - w] = base[0];
- for (w = right; w--;)
- base[off2 + w + 1] = base[off2];
- }
- }
-
- if (top || bot) {
- // spread in Y
- base = pyr->ptr[top - pyr->border] - pyr->border;
- for (h = top; h--; base -= pyr->pitch) {
- memcpy(base - pyr->pitch, base, pyr->pitch * sizeof(short));
- }
-
- base = pyr->ptr[pyr->height + pyr->border - bot] - pyr->border;
- for (h = bot; h--; base += pyr->pitch) {
- memcpy(base, base - pyr->pitch, pyr->pitch * sizeof(short));
- }
- }
-}
-
-void PyramidShort::BorderExpandOdd(PyramidShort *in, PyramidShort *out, PyramidShort *scr,
- int mode)
-{
- int i,j;
- int off = in->border / 2;
-
- // Vertical Filter
- for (j = -off; j < in->height + off; j++) {
- int j2 = j * 2;
- int limit = scr->width + scr->border;
- for (i = -scr->border; i < limit; i++) {
- int t1 = in->ptr[j][i];
- int t2 = in->ptr[j+1][i];
- scr->ptr[j2][i] = (short)
- ((6 * t1 + (in->ptr[j-1][i] + t2) + 4) >> 3);
- scr->ptr[j2+1][i] = (short)((t1 + t2 + 1) >> 1);
- }
- }
-
- BorderSpread(scr, 0, 0, 3, 3);
-
- // Horizontal Filter
- int limit = out->height + out->border;
- for (j = -out->border; j < limit; j++) {
- for (i = -off; i < scr->width + off; i++) {
- int i2 = i * 2;
- int t1 = scr->ptr[j][i];
- int t2 = scr->ptr[j][i+1];
- out->ptr[j][i2] = (short) (out->ptr[j][i2] +
- (mode * ((6 * t1 +
- scr->ptr[j][i-1] + t2 + 4) >> 3)));
- out->ptr[j][i2+1] = (short) (out->ptr[j][i2+1] +
- (mode * ((t1 + t2 + 1) >> 1)));
- }
- }
-
-}
-
-int PyramidShort::BorderExpand(PyramidShort *pyr, int nlev, int mode)
-{
- PyramidShort *tpyr = pyr + nlev - 1;
- PyramidShort *scr = allocateImage(pyr[1].width, pyr[0].height, pyr->border);
- if (scr == NULL) return 0;
-
- if (mode > 0) {
- // Expand and add (reconstruct from Laplacian)
- for (; tpyr > pyr; tpyr--) {
- scr->width = tpyr[0].width;
- scr->height = tpyr[-1].height;
- BorderExpandOdd(tpyr, tpyr - 1, scr, 1);
- }
- }
- else if (mode < 0) {
- // Expand and subtract (build Laplacian)
- while ((pyr++) < tpyr) {
- scr->width = pyr[0].width;
- scr->height = pyr[-1].height;
- BorderExpandOdd(pyr, pyr - 1, scr, -1);
- }
- }
-
- freeImage(scr);
- return 1;
-}
-
-void PyramidShort::BorderReduceOdd(PyramidShort *in, PyramidShort *out, PyramidShort *scr)
-{
- ImageTypeShortBase *s, *ns, *ls, *p, *np;
-
- int off = scr->border - 2;
- s = scr->ptr[-scr->border] - (off >> 1);
- ns = s + scr->pitch;
- ls = scr->ptr[scr->height + scr->border - 1] + scr->pitch - (off >> 1);
- int width = scr->width + scr->border;
- p = in->ptr[-scr->border] - off;
- np = p + in->pitch;
-
- // treat it as if the whole thing were the image
- for (; s < ls; s = ns, ns += scr->pitch, p = np, np += in->pitch) {
- for (int w = width; w--; s++, p += 2) {
- *s = (short)((((int) p[-2]) + ((int) p[2]) + 8 + // 1
- ((((int) p[-1]) + ((int) p[1])) << 2) + // 4
- ((int) *p) * 6) >> 4); // 6
- }
- }
-
- BorderSpread(scr, 5, 4 + ((in->width ^ 1) & 1), 0, 0);
-
- s = out->ptr[-(off >> 1)] - out->border;
- ns = s + out->pitch;
- ls = s + out->pitch * (out->height + off);
- p = scr->ptr[-off] - out->border;
- int pitch = scr->pitch;
- int pitch2 = pitch << 1;
- np = p + pitch2;
- for (; s < ls; s = ns, ns += out->pitch, p = np, np += pitch2) {
- for (int w = out->pitch; w--; s++, p++) {
- *s = (short)((((int) p[-pitch2]) + ((int) p[pitch2]) + 8 + // 1
- ((((int) p[-pitch]) + ((int) p[pitch])) << 2) + // 4
- ((int) *p) * 6) >> 4); // 6
- }
- }
- BorderSpread(out, 0, 0, 5, 5);
-
-}
-
-int PyramidShort::BorderReduce(PyramidShort *pyr, int nlev)
-{
- PyramidShort *scr = allocateImage(pyr[1].width, pyr[0].height, pyr->border);
- if (scr == NULL)
- return 0;
-
- BorderSpread(pyr, pyr->border, pyr->border, pyr->border, pyr->border);
- while (--nlev) {
- BorderReduceOdd(pyr, pyr + 1, scr);
- pyr++;
- scr->width = pyr[1].width;
- scr->height = pyr[0].height;
- }
-
- freeImage(scr);
- return 1;
-}
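// Illustrative sketch (not from the original sources; error handling trimmed): a
// typical round trip through the API above. The definition of allocatePyramidPacked()
// takes (levels, width, height, border); per the comments above, mode -1 in
// BorderExpand() builds a Laplacian pyramid and mode +1 reconstructs from it.
#include "Pyramid.h"

static void pyramidRoundTrip(int width, int height)
{
    const int levels = 4, border = 8;
    PyramidShort *pyr =
        PyramidShort::allocatePyramidPacked(levels, width, height, border);
    if (pyr == NULL) return;

    // ... fill pyr[0].ptr[y][x] with the full-resolution image here ...

    PyramidShort::BorderReduce(pyr, levels);      // fill the coarser levels
    PyramidShort::BorderExpand(pyr, levels, -1);  // turn the stack into a Laplacian pyramid
    PyramidShort::BorderExpand(pyr, levels, +1);  // collapse it back

    PyramidShort::freeImage(pyr);                 // one free() for the packed block
}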
diff --git a/jni_mosaic/feature_mos/src/mosaic/Pyramid.h b/jni_mosaic/feature_mos/src/mosaic/Pyramid.h
deleted file mode 100644
index c5fe90714..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/Pyramid.h
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// Pyramid.h
-
-#ifndef PYRAMID_H
-#define PYRAMID_H
-
-#include "ImageUtils.h"
-
-typedef unsigned short int real;
-
-// Structure containing a packed pyramid of type ImageTypeShort. Used for pyramid
-// blending, among other things.
-
-class PyramidShort
-{
-
-public:
-
- ImageTypeShort *ptr; // Pointer containing the image
- real width, height; // Width and height of input images
- real numChannels; // Number of channels in input images
- real border; // border size
- real pitch; // Pitch. Used for moving through image efficiently.
-
- static PyramidShort *allocatePyramidPacked(real levels, real width, real height, real border = 0);
- static PyramidShort *allocateImage(real width, real height, real border);
- static void createPyramid(ImageType image, PyramidShort *pyramid, int last = 3 );
- static void freeImage(PyramidShort *image);
-
- static unsigned int calcStorage(real width, real height, real border2, int levels, int *lines);
-
- static void BorderSpread(PyramidShort *pyr, int left, int right, int top, int bot);
- static void BorderExpandOdd(PyramidShort *in, PyramidShort *out, PyramidShort *scr, int mode);
- static int BorderExpand(PyramidShort *pyr, int nlev, int mode);
- static int BorderReduce(PyramidShort *pyr, int nlev);
- static void BorderReduceOdd(PyramidShort *in, PyramidShort *out, PyramidShort *scr);
-};
-
-#endif
diff --git a/jni_mosaic/feature_mos/src/mosaic/trsMatrix.cpp b/jni_mosaic/feature_mos/src/mosaic/trsMatrix.cpp
deleted file mode 100644
index 5fc6a86b3..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/trsMatrix.cpp
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// trsMatrix.cpp
-// $Id: trsMatrix.cpp,v 1.9 2011/06/17 13:35:48 mbansal Exp $
-
-#include "stdio.h"
-#include <math.h>
-#include "trsMatrix.h"
-
-void mult33d(double a[3][3], double b[3][3], double c[3][3])
-{
- a[0][0] = b[0][0]*c[0][0] + b[0][1]*c[1][0] + b[0][2]*c[2][0];
- a[0][1] = b[0][0]*c[0][1] + b[0][1]*c[1][1] + b[0][2]*c[2][1];
- a[0][2] = b[0][0]*c[0][2] + b[0][1]*c[1][2] + b[0][2]*c[2][2];
- a[1][0] = b[1][0]*c[0][0] + b[1][1]*c[1][0] + b[1][2]*c[2][0];
- a[1][1] = b[1][0]*c[0][1] + b[1][1]*c[1][1] + b[1][2]*c[2][1];
- a[1][2] = b[1][0]*c[0][2] + b[1][1]*c[1][2] + b[1][2]*c[2][2];
- a[2][0] = b[2][0]*c[0][0] + b[2][1]*c[1][0] + b[2][2]*c[2][0];
- a[2][1] = b[2][0]*c[0][1] + b[2][1]*c[1][1] + b[2][2]*c[2][1];
- a[2][2] = b[2][0]*c[0][2] + b[2][1]*c[1][2] + b[2][2]*c[2][2];
-}
-
-
-// normProjMat33d
-// m = input matrix
-// return: result if successful
-int normProjMat33d(double m[3][3])
-{
- double m22;
-
- if(m[2][2] == 0.0)
- {
- return 0;
- }
-
- m[0][0] /= m[2][2];
- m[0][1] /= m[2][2];
- m[0][2] /= m[2][2];
- m[1][0] /= m[2][2];
- m[1][1] /= m[2][2];
- m[1][2] /= m[2][2];
- m[2][0] /= m[2][2];
- m[2][1] /= m[2][2];
- m[2][2] = 1.0;
-
- return 1;
-}
-
-// det33d
-// m = input matrix
-// returns: determinant
-double det33d(const double m[3][3])
-{
- double result;
-
- result = m[0][0] * (m[1][1] * m[2][2] - m[1][2] * m[2][1]);
- result += m[0][1] * (m[1][2] * m[2][0] - m[1][0] * m[2][2]);
- result += m[0][2] * (m[1][0] * m[2][1] - m[1][1] * m[2][0]);
-
- return result;
-}
-
-// inv33d
-//
-void inv33d(const double m[3][3], double out[3][3])
-{
- double det = det33d(m);
-
- out[0][0] = (m[1][1]*m[2][2] - m[1][2]*m[2][1]) / det;
- out[1][0] = (m[1][2]*m[2][0] - m[1][0]*m[2][2]) / det;
- out[2][0] = (m[1][0]*m[2][1] - m[1][1]*m[2][0]) / det;
-
- out[0][1] = (m[0][2]*m[2][1] - m[0][1]*m[2][2]) / det;
- out[1][1] = (m[0][0]*m[2][2] - m[0][2]*m[2][0]) / det;
- out[2][1] = (m[0][1]*m[2][0] - m[0][0]*m[2][1]) / det;
-
- out[0][2] = (m[0][1]*m[1][2] - m[0][2]*m[1][1]) / det;
- out[1][2] = (m[0][2]*m[1][0] - m[0][0]*m[1][2]) / det;
- out[2][2] = (m[0][0]*m[1][1] - m[0][1]*m[1][0]) / det;
-}
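// Illustrative sketch (standalone check, not from the original sources): composing a
// matrix with its inv33d() result through mult33d() should give the identity, up to
// floating-point error, whenever det33d(m) is non-zero.
#include "trsMatrix.h"

static void checkInverse(double m[3][3], double residual[3][3])
{
    double inv[3][3];
    inv33d(m, inv);
    mult33d(residual, m, inv);   // residual = m * inv, expected ~ identity
}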
diff --git a/jni_mosaic/feature_mos/src/mosaic/trsMatrix.h b/jni_mosaic/feature_mos/src/mosaic/trsMatrix.h
deleted file mode 100644
index 054cc3335..000000000
--- a/jni_mosaic/feature_mos/src/mosaic/trsMatrix.h
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-///////////////////////////////////////////////////
-// trsMatrix.h
-// $Id: trsMatrix.h,v 1.8 2011/06/17 13:35:48 mbansal Exp $
-
-#ifndef TRSMATRIX_H_
-#define TRSMATRIX_H_
-
-
-// Calculate the determinant of a matrix
-double det33d(const double m[3][3]);
-
-// Invert a matrix
-void inv33d(const double m[3][3], double out[3][3]);
-
-// Multiply a = b * c
-void mult33d(double a[3][3], double b[3][3], double c[3][3]);
-
-// Normalize matrix so matrix[2][2] is '1'
-int normProjMat33d(double m[3][3]);
-
-inline double ProjZ(double trs[3][3], double x, double y, double f)
-{
- return ((trs)[2][0]*(x) + (trs)[2][1]*(y) + (trs)[2][2]*(f));
-}
-
-inline double ProjX(double trs[3][3], double x, double y, double z, double f)
-{
- return (((trs)[0][0]*(x) + (trs)[0][1]*(y) + (trs)[0][2]*(f)) / (z));
-}
-
-inline double ProjY(double trs[3][3], double x, double y, double z, double f)
-{
- return (((trs)[1][0]*(x) + (trs)[1][1]*(y) + (trs)[1][2]*(f)) / (z));
-}
-
-
-#endif
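// Illustrative sketch (hypothetical helper): ProjZ() gives the homogeneous scale of
// the mapped point, and ProjX()/ProjY() divide the first two rows by it, so the
// usual call order is:
inline void projPoint(double trs[3][3], double x, double y, double f,
                      double &xOut, double &yOut)
{
    double z = ProjZ(trs, x, y, f);   // w component after applying trs
    xOut = ProjX(trs, x, y, z, f);    // (row 0 . [x y f]) / w
    yOut = ProjY(trs, x, y, z, f);    // (row 1 . [x y f]) / w
}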
diff --git a/jni_mosaic/feature_mos/src/mosaic_renderer/FrameBuffer.cpp b/jni_mosaic/feature_mos/src/mosaic_renderer/FrameBuffer.cpp
deleted file mode 100755
index a956f23b7..000000000
--- a/jni_mosaic/feature_mos/src/mosaic_renderer/FrameBuffer.cpp
+++ /dev/null
@@ -1,98 +0,0 @@
-#include "FrameBuffer.h"
-
-FrameBuffer::FrameBuffer()
-{
- Reset();
-}
-
-FrameBuffer::~FrameBuffer() {
-}
-
-void FrameBuffer::Reset() {
- mFrameBufferName = -1;
- mTextureName = -1;
- mWidth = 0;
- mHeight = 0;
- mFormat = -1;
-}
-
-bool FrameBuffer::InitializeGLContext() {
- Reset();
- return CreateBuffers();
-}
-
-bool FrameBuffer::Init(int width, int height, GLenum format) {
- if (mFrameBufferName == (GLuint)-1) {
- if (!CreateBuffers()) {
- return false;
- }
- }
- glBindFramebuffer(GL_FRAMEBUFFER, mFrameBufferName);
- glBindTexture(GL_TEXTURE_2D, mTextureName);
-
- glTexImage2D(GL_TEXTURE_2D,
- 0,
- format,
- width,
- height,
- 0,
- format,
- GL_UNSIGNED_BYTE,
- NULL);
- if (!checkGlError("bind/teximage")) {
- return false;
- }
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
- // GL_CLAMP_TO_EDGE is required here so the FBO texture works even when its
- // dimensions are not powers of two (GLES 2.0 restricts NPOT textures).
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
- glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
-
- // Attach texture to frame buffer.
- glFramebufferTexture2D(GL_FRAMEBUFFER,
- GL_COLOR_ATTACHMENT0,
- GL_TEXTURE_2D,
- mTextureName,
- 0);
- checkFramebufferStatus("FrameBuffer.cpp");
- checkGlError("framebuffertexture2d");
-
- if (!checkGlError("texture setup")) {
- return false;
- }
- mWidth = width;
- mHeight = height;
- mFormat = format;
- glBindFramebuffer(GL_FRAMEBUFFER, 0);
- return true;
-}
-
-bool FrameBuffer::CreateBuffers() {
- glGenFramebuffers(1, &mFrameBufferName);
- glGenTextures(1, &mTextureName);
- if (!checkGlError("texture generation")) {
- return false;
- }
- return true;
-}
-
-GLuint FrameBuffer::GetTextureName() const {
- return mTextureName;
-}
-
-GLuint FrameBuffer::GetFrameBufferName() const {
- return mFrameBufferName;
-}
-
-GLenum FrameBuffer::GetFormat() const {
- return mFormat;
-}
-
-int FrameBuffer::GetWidth() const {
- return mWidth;
-}
-
-int FrameBuffer::GetHeight() const {
- return mHeight;
-}
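// Illustrative sketch (not from the original sources; GL context creation and the
// actual draw calls are assumed to happen elsewhere): typical off-screen use of the
// class above.
#include "FrameBuffer.h"

static bool renderOffscreen(FrameBuffer &fb, int width, int height)
{
    if (!fb.InitializeGLContext())         // generates the FBO and texture names
        return false;
    if (!fb.Init(width, height, GL_RGBA))  // attaches the texture as the color buffer
        return false;

    glBindFramebuffer(GL_FRAMEBUFFER, fb.GetFrameBufferName());
    glViewport(0, 0, fb.GetWidth(), fb.GetHeight());
    // ... issue draw calls; the result ends up in fb.GetTextureName() ...
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    return true;
}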
diff --git a/jni_mosaic/feature_mos/src/mosaic_renderer/FrameBuffer.h b/jni_mosaic/feature_mos/src/mosaic_renderer/FrameBuffer.h
deleted file mode 100755
index 314b12622..000000000
--- a/jni_mosaic/feature_mos/src/mosaic_renderer/FrameBuffer.h
+++ /dev/null
@@ -1,34 +0,0 @@
-#pragma once
-
-#include <EGL/egl.h>
-#include <GLES2/gl2.h>
-#include <GLES2/gl2ext.h>
-
-#define checkGlError(op) checkGLErrorDetail(__FILE__, __LINE__, (op))
-
-extern bool checkGLErrorDetail(const char* file, int line, const char* op);
-extern void checkFramebufferStatus(const char* name);
-
-class FrameBuffer {
- public:
- FrameBuffer();
- virtual ~FrameBuffer();
-
- bool InitializeGLContext();
- bool Init(int width, int height, GLenum format);
- GLuint GetTextureName() const;
- GLuint GetFrameBufferName() const;
- GLenum GetFormat() const;
-
- int GetWidth() const;
- int GetHeight() const;
-
- private:
- void Reset();
- bool CreateBuffers();
- GLuint mFrameBufferName;
- GLuint mTextureName;
- int mWidth;
- int mHeight;
- GLenum mFormat;
-};
diff --git a/jni_mosaic/feature_mos/src/mosaic_renderer/Renderer.cpp b/jni_mosaic/feature_mos/src/mosaic_renderer/Renderer.cpp
deleted file mode 100755
index b9938eb6b..000000000
--- a/jni_mosaic/feature_mos/src/mosaic_renderer/Renderer.cpp
+++ /dev/null
@@ -1,226 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "Renderer.h"
-
-#include "mosaic/Log.h"
-#define LOG_TAG "Renderer"
-
-#include <GLES2/gl2ext.h>
-
-Renderer::Renderer()
- : mGlProgram(0),
- mInputTextureName(-1),
- mInputTextureWidth(0),
- mInputTextureHeight(0),
- mSurfaceWidth(0),
- mSurfaceHeight(0),
- mFrameBuffer(NULL) // otherwise InitializeGLContext() below reads an uninitialized pointer
-{
- InitializeGLContext();
-}
-
-Renderer::~Renderer() {
-}
-
-GLuint Renderer::loadShader(GLenum shaderType, const char* pSource) {
- GLuint shader = glCreateShader(shaderType);
- if (shader) {
- glShaderSource(shader, 1, &pSource, NULL);
- glCompileShader(shader);
- GLint compiled = 0;
- glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
- if (!compiled) {
- GLint infoLen = 0;
- glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
- if (infoLen) {
- char* buf = (char*) malloc(infoLen);
- if (buf) {
- glGetShaderInfoLog(shader, infoLen, NULL, buf);
- LOGE("Could not compile shader %d:\n%s\n",
- shaderType, buf);
- free(buf);
- }
- glDeleteShader(shader);
- shader = 0;
- }
- }
- }
- return shader;
-}
-
-GLuint Renderer::createProgram(const char* pVertexSource, const char* pFragmentSource)
-{
- GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
- if (!vertexShader)
- {
- return 0;
- }
-
- GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
- if (!pixelShader)
- {
- return 0;
- }
-
- GLuint program = glCreateProgram();
- if (program)
- {
- glAttachShader(program, vertexShader);
- checkGlError("glAttachShader");
- glAttachShader(program, pixelShader);
- checkGlError("glAttachShader");
-
- glLinkProgram(program);
- GLint linkStatus = GL_FALSE;
- glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
-
- LOGI("Program Linked (%d)!", program);
-
- if (linkStatus != GL_TRUE)
- {
- GLint bufLength = 0;
- glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
- if (bufLength)
- {
- char* buf = (char*) malloc(bufLength);
- if (buf)
- {
- glGetProgramInfoLog(program, bufLength, NULL, buf);
- LOGE("Could not link program:\n%s\n", buf);
- free(buf);
- }
- }
- glDeleteProgram(program);
- program = 0;
- }
- }
- return program;
-}
-
-// Set this renderer to use the default frame-buffer (screen) and
-// set the viewport size to be the given width and height (pixels).
-bool Renderer::SetupGraphics(int width, int height)
-{
- bool succeeded = false;
- do {
- if (mGlProgram == 0)
- {
- if (!InitializeGLProgram())
- {
- break;
- }
- }
- glUseProgram(mGlProgram);
- if (!checkGlError("glUseProgram")) break;
-
- glBindFramebuffer(GL_FRAMEBUFFER, 0);
-
- mFrameBuffer = NULL;
- mSurfaceWidth = width;
- mSurfaceHeight = height;
-
- glViewport(0, 0, mSurfaceWidth, mSurfaceHeight);
- if (!checkGlError("glViewport")) break;
- succeeded = true;
- } while (false);
-
- return succeeded;
-}
-
-
-// Set this renderer to use the specified FBO and
-// set the viewport size to be the width and height of this FBO.
-bool Renderer::SetupGraphics(FrameBuffer* buffer)
-{
- bool succeeded = false;
- do {
- if (mGlProgram == 0)
- {
- if (!InitializeGLProgram())
- {
- break;
- }
- }
- glUseProgram(mGlProgram);
- if (!checkGlError("glUseProgram")) break;
-
- glBindFramebuffer(GL_FRAMEBUFFER, buffer->GetFrameBufferName());
-
- mFrameBuffer = buffer;
- mSurfaceWidth = mFrameBuffer->GetWidth();
- mSurfaceHeight = mFrameBuffer->GetHeight();
-
- glViewport(0, 0, mSurfaceWidth, mSurfaceHeight);
- if (!checkGlError("glViewport")) break;
- succeeded = true;
- } while (false);
-
- return succeeded;
-}
-
-bool Renderer::Clear(float r, float g, float b, float a)
-{
- bool succeeded = false;
- do {
- bool rt = (mFrameBuffer == NULL)?
- SetupGraphics(mSurfaceWidth, mSurfaceHeight) :
- SetupGraphics(mFrameBuffer);
-
- if(!rt)
- break;
-
- glClearColor(r, g, b, a);
- glClear(GL_COLOR_BUFFER_BIT);
-
- succeeded = true;
- } while (false);
- return succeeded;
-
-}
-
-void Renderer::InitializeGLContext()
-{
- if(mFrameBuffer != NULL)
- {
- delete mFrameBuffer;
- mFrameBuffer = NULL;
- }
-
- mInputTextureName = -1;
- mInputTextureType = GL_TEXTURE_2D;
- mGlProgram = 0;
-}
-
-int Renderer::GetTextureName()
-{
- return mInputTextureName;
-}
-
-void Renderer::SetInputTextureName(GLuint textureName)
-{
- mInputTextureName = textureName;
-}
-
-void Renderer::SetInputTextureType(GLenum textureType)
-{
- mInputTextureType = textureType;
-}
-
-void Renderer::SetInputTextureDimensions(int width, int height)
-{
- mInputTextureWidth = width;
- mInputTextureHeight = height;
-}
diff --git a/jni_mosaic/feature_mos/src/mosaic_renderer/Renderer.h b/jni_mosaic/feature_mos/src/mosaic_renderer/Renderer.h
deleted file mode 100755
index a43e8028e..000000000
--- a/jni_mosaic/feature_mos/src/mosaic_renderer/Renderer.h
+++ /dev/null
@@ -1,65 +0,0 @@
-#pragma once
-
-#include "FrameBuffer.h"
-
-#include <GLES2/gl2.h>
-
-#include <stdint.h>
-#include <stdio.h>
-#include <stdlib.h>
-
-class Renderer {
- public:
- Renderer();
- virtual ~Renderer();
-
- // Initialize OpenGL resources
- // @return true if successful
- virtual bool InitializeGLProgram() = 0;
-
- bool SetupGraphics(FrameBuffer* buffer);
- bool SetupGraphics(int width, int height);
-
- bool Clear(float r, float g, float b, float a);
-
- int GetTextureName();
- void SetInputTextureName(GLuint textureName);
- void SetInputTextureDimensions(int width, int height);
- void SetInputTextureType(GLenum textureType);
-
- void InitializeGLContext();
-
- protected:
-
- GLuint loadShader(GLenum shaderType, const char* pSource);
- GLuint createProgram(const char*, const char* );
-
- int SurfaceWidth() const { return mSurfaceWidth; }
- int SurfaceHeight() const { return mSurfaceHeight; }
-
- // Source code for shaders.
- virtual const char* VertexShaderSource() const = 0;
- virtual const char* FragmentShaderSource() const = 0;
-
- // Redefine this to use special texture types such as
- // GL_TEXTURE_EXTERNAL_OES.
- GLenum InputTextureType() const { return mInputTextureType; }
-
- GLuint mGlProgram;
- GLuint mInputTextureName;
- GLenum mInputTextureType;
- int mInputTextureWidth;
- int mInputTextureHeight;
-
- // Attribute locations
- GLint mScalingtransLoc;
- GLint maPositionHandle;
- GLint maTextureHandle;
-
-
- int mSurfaceWidth; // Width of target surface.
- int mSurfaceHeight; // Height of target surface.
-
- FrameBuffer *mFrameBuffer;
-};
-
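// Illustrative sketch (hypothetical subclass, not from the original sources): the
// pattern the pure-virtual interface above expects — supply the two shader sources
// and build the program in InitializeGLProgram(), caching the handle in mGlProgram.
class PassThroughRenderer : public Renderer {
 public:
    bool InitializeGLProgram() {
        mGlProgram = createProgram(VertexShaderSource(), FragmentShaderSource());
        return mGlProgram != 0;
    }
 protected:
    const char* VertexShaderSource() const {
        return "attribute vec4 aPosition;\n"
               "void main() { gl_Position = aPosition; }\n";
    }
    const char* FragmentShaderSource() const {
        return "precision mediump float;\n"
               "void main() { gl_FragColor = vec4(1.0); }\n";
    }
};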
diff --git a/jni_mosaic/feature_mos/src/mosaic_renderer/SurfaceTextureRenderer.cpp b/jni_mosaic/feature_mos/src/mosaic_renderer/SurfaceTextureRenderer.cpp
deleted file mode 100755
index 88aac3626..000000000
--- a/jni_mosaic/feature_mos/src/mosaic_renderer/SurfaceTextureRenderer.cpp
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "SurfaceTextureRenderer.h"
-
-#include <GLES2/gl2ext.h>
-const GLfloat g_vVertices[] = {
- -1.f, -1.f, 0.0f, 1.0f, // Position 0
- 0.0f, 0.0f, // TexCoord 0
- 1.f, -1.f, 0.0f, 1.0f, // Position 1
- 1.0f, 0.0f, // TexCoord 1
- -1.f, 1.f, 0.0f, 1.0f, // Position 2
- 0.0f, 1.0f, // TexCoord 2
- 1.f, 1.f, 0.0f, 1.0f, // Position 3
- 1.0f, 1.0f // TexCoord 3
-};
-GLushort g_iIndices2[] = { 0, 1, 2, 3 };
-
-const int GL_TEXTURE_EXTERNAL_OES_ENUM = 0x8D65;
-
-const int VERTEX_STRIDE = 6 * sizeof(GLfloat);
-
-SurfaceTextureRenderer::SurfaceTextureRenderer() : Renderer() {
- memset(mSTMatrix, 0, 16*sizeof(float));
- mSTMatrix[0] = 1.0f;
- mSTMatrix[5] = 1.0f;
- mSTMatrix[10] = 1.0f;
- mSTMatrix[15] = 1.0f;
-}
-
-SurfaceTextureRenderer::~SurfaceTextureRenderer() {
-}
-
-void SurfaceTextureRenderer::SetViewportMatrix(int w, int h, int W, int H)
-{
- for(int i=0; i<16; i++)
- {
- mViewportMatrix[i] = 0.0f;
- }
-
- mViewportMatrix[0] = float(w)/float(W);
- mViewportMatrix[5] = float(h)/float(H);
- mViewportMatrix[10] = 1.0f;
- mViewportMatrix[12] = -1.0f + float(w)/float(W);
- mViewportMatrix[13] = -1.0f + float(h)/float(H);
- mViewportMatrix[15] = 1.0f;
-}
-
-void SurfaceTextureRenderer::SetScalingMatrix(float xscale, float yscale)
-{
- for(int i=0; i<16; i++)
- {
- mScalingMatrix[i] = 0.0f;
- }
-
- mScalingMatrix[0] = xscale;
- mScalingMatrix[5] = yscale;
- mScalingMatrix[10] = 1.0f;
- mScalingMatrix[15] = 1.0f;
-}
-
-void SurfaceTextureRenderer::SetSTMatrix(float *stmat)
-{
- memcpy(mSTMatrix, stmat, 16*sizeof(float));
-}
-
-
-bool SurfaceTextureRenderer::InitializeGLProgram()
-{
- bool succeeded = false;
- do {
- GLuint glProgram;
- glProgram = createProgram(VertexShaderSource(),
- FragmentShaderSource());
- if (!glProgram) {
- break;
- }
-
- glUseProgram(glProgram);
- if (!checkGlError("glUseProgram")) break;
-
- maPositionHandle = glGetAttribLocation(glProgram, "aPosition");
- checkGlError("glGetAttribLocation aPosition");
- maTextureHandle = glGetAttribLocation(glProgram, "aTextureCoord");
- checkGlError("glGetAttribLocation aTextureCoord");
- muSTMatrixHandle = glGetUniformLocation(glProgram, "uSTMatrix");
- checkGlError("glGetUniformLocation uSTMatrix");
- mScalingtransLoc = glGetUniformLocation(glProgram, "u_scalingtrans");
-
- glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
- mGlProgram = glProgram;
- succeeded = true;
- } while (false);
-
- if (!succeeded && (mGlProgram != 0))
- {
- glDeleteProgram(mGlProgram);
- checkGlError("glDeleteProgram");
- mGlProgram = 0;
- }
- return succeeded;
-}
-
-bool SurfaceTextureRenderer::DrawTexture(GLfloat *affine)
-{
- bool succeeded = false;
- do {
- bool rt = (mFrameBuffer == NULL)?
- SetupGraphics(mSurfaceWidth, mSurfaceHeight) :
- SetupGraphics(mFrameBuffer);
-
- if(!rt)
- break;
-
- glDisable(GL_BLEND);
-
- glActiveTexture(GL_TEXTURE0);
- if (!checkGlError("glActiveTexture")) break;
-
- const GLenum texture_type = InputTextureType();
- glBindTexture(texture_type, mInputTextureName);
- if (!checkGlError("glBindTexture")) break;
-
- glUniformMatrix4fv(mScalingtransLoc, 1, GL_FALSE, mScalingMatrix);
- glUniformMatrix4fv(muSTMatrixHandle, 1, GL_FALSE, mSTMatrix);
-
- // Load the vertex position
- glVertexAttribPointer(maPositionHandle, 4, GL_FLOAT,
- GL_FALSE, VERTEX_STRIDE, g_vVertices);
- glEnableVertexAttribArray(maPositionHandle);
- // Load the texture coordinate
- glVertexAttribPointer(maTextureHandle, 2, GL_FLOAT,
- GL_FALSE, VERTEX_STRIDE, &g_vVertices[4]);
- glEnableVertexAttribArray(maTextureHandle);
-
- // And, finally, execute the GL draw command.
- glDrawElements(GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, g_iIndices2);
-
- glBindFramebuffer(GL_FRAMEBUFFER, 0);
- succeeded = true;
- } while (false);
- return succeeded;
-}
-
-const char* SurfaceTextureRenderer::VertexShaderSource() const
-{
- static const char gVertexShader[] =
- "uniform mat4 uSTMatrix;\n"
- "uniform mat4 u_scalingtrans; \n"
- "attribute vec4 aPosition;\n"
- "attribute vec4 aTextureCoord;\n"
- "varying vec2 vTextureNormCoord;\n"
- "void main() {\n"
- " gl_Position = u_scalingtrans * aPosition;\n"
- " vTextureNormCoord = (uSTMatrix * aTextureCoord).xy;\n"
- "}\n";
-
- return gVertexShader;
-}
-
-const char* SurfaceTextureRenderer::FragmentShaderSource() const
-{
- static const char gFragmentShader[] =
- "#extension GL_OES_EGL_image_external : require\n"
- "precision mediump float;\n"
- "varying vec2 vTextureNormCoord;\n"
- "uniform samplerExternalOES sTexture;\n"
- "void main() {\n"
- " gl_FragColor = texture2D(sTexture, vTextureNormCoord);\n"
- "}\n";
-
- return gFragmentShader;
-}
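// Illustrative sketch (the matrix values are placeholders; the helper name is
// hypothetical): the call order this renderer expects. In the camera app the texture
// matrix would come from the Java-side SurfaceTexture, but any column-major 4x4 works.
static void drawExternalTexture(SurfaceTextureRenderer &renderer, GLuint texName,
                                float *surfaceTexMatrix, int width, int height)
{
    renderer.SetInputTextureName(texName);
    renderer.SetInputTextureType(GL_TEXTURE_EXTERNAL_OES); // from GLES2/gl2ext.h
    renderer.SetInputTextureDimensions(width, height);
    renderer.SetupGraphics(width, height);   // render to the default framebuffer
    renderer.SetSTMatrix(surfaceTexMatrix);  // 16 floats, column-major
    renderer.SetScalingMatrix(1.0f, 1.0f);
    renderer.DrawTexture(NULL);              // the affine argument is unused above
}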
diff --git a/jni_mosaic/feature_mos/src/mosaic_renderer/SurfaceTextureRenderer.h b/jni_mosaic/feature_mos/src/mosaic_renderer/SurfaceTextureRenderer.h
deleted file mode 100755
index ea2b81ade..000000000
--- a/jni_mosaic/feature_mos/src/mosaic_renderer/SurfaceTextureRenderer.h
+++ /dev/null
@@ -1,44 +0,0 @@
-#pragma once
-
-#include "FrameBuffer.h"
-#include "Renderer.h"
-
-#include <GLES2/gl2.h>
-
-#include <stdint.h>
-#include <stdio.h>
-#include <stdlib.h>
-
-class SurfaceTextureRenderer: public Renderer {
- public:
- SurfaceTextureRenderer();
- virtual ~SurfaceTextureRenderer();
-
- // Initialize OpenGL resources
- // @return true if successful
- bool InitializeGLProgram();
-
- bool DrawTexture(GLfloat *affine);
-
- void SetViewportMatrix(int w, int h, int W, int H);
- void SetScalingMatrix(float xscale, float yscale);
- void SetSTMatrix(float *stmat);
-
- private:
- // Source code for shaders.
- const char* VertexShaderSource() const;
- const char* FragmentShaderSource() const;
-
- // Attribute locations
- GLint mScalingtransLoc;
- GLint muSTMatrixHandle;
- GLint maPositionHandle;
- GLint maTextureHandle;
-
- GLfloat mViewportMatrix[16];
- GLfloat mScalingMatrix[16];
-
- GLfloat mSTMatrix[16];
-
-};
-
diff --git a/jni_mosaic/feature_mos/src/mosaic_renderer/WarpRenderer.cpp b/jni_mosaic/feature_mos/src/mosaic_renderer/WarpRenderer.cpp
deleted file mode 100755
index af6779a3f..000000000
--- a/jni_mosaic/feature_mos/src/mosaic_renderer/WarpRenderer.cpp
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "WarpRenderer.h"
-
-#include <GLES2/gl2ext.h>
-
-const GLfloat g_vVertices[] = {
- -1.f, 1.f, 0.0f, 1.0f, // Position 0
- 0.0f, 1.0f, // TexCoord 0
- 1.f, 1.f, 0.0f, 1.0f, // Position 1
- 1.0f, 1.0f, // TexCoord 1
- -1.f, -1.f, 0.0f, 1.0f, // Position 2
- 0.0f, 0.0f, // TexCoord 2
- 1.f, -1.f, 0.0f, 1.0f, // Position 3
- 1.0f, 0.0f // TexCoord 3
-};
-
-const int VERTEX_STRIDE = 6 * sizeof(GLfloat);
-
-GLushort g_iIndices[] = { 0, 1, 2, 3 };
-
-WarpRenderer::WarpRenderer() : Renderer()
-{
-}
-
-WarpRenderer::~WarpRenderer() {
-}
-
-void WarpRenderer::SetViewportMatrix(int w, int h, int W, int H)
-{
- for(int i=0; i<16; i++)
- {
- mViewportMatrix[i] = 0.0f;
- }
-
- mViewportMatrix[0] = float(w)/float(W);
- mViewportMatrix[5] = float(h)/float(H);
- mViewportMatrix[10] = 1.0f;
- mViewportMatrix[12] = -1.0f + float(w)/float(W);
- mViewportMatrix[13] = -1.0f + float(h)/float(H);
- mViewportMatrix[15] = 1.0f;
-}
-
-void WarpRenderer::SetScalingMatrix(float xscale, float yscale)
-{
- for(int i=0; i<16; i++)
- {
- mScalingMatrix[i] = 0.0f;
- }
-
- mScalingMatrix[0] = xscale;
- mScalingMatrix[5] = yscale;
- mScalingMatrix[10] = 1.0f;
- mScalingMatrix[15] = 1.0f;
-}
-
-bool WarpRenderer::InitializeGLProgram()
-{
- bool succeeded = false;
- do {
- GLuint glProgram;
- glProgram = createProgram(VertexShaderSource(),
- FragmentShaderSource());
- if (!glProgram) {
- break;
- }
-
- glUseProgram(glProgram);
- if (!checkGlError("glUseProgram")) break;
-
- // Get attribute locations
- mPositionLoc = glGetAttribLocation(glProgram, "a_position");
- mAffinetransLoc = glGetUniformLocation(glProgram, "u_affinetrans");
- mViewporttransLoc = glGetUniformLocation(glProgram, "u_viewporttrans");
- mScalingtransLoc = glGetUniformLocation(glProgram, "u_scalingtrans");
- mTexCoordLoc = glGetAttribLocation(glProgram, "a_texCoord");
-
- // Get sampler location
- mSamplerLoc = glGetUniformLocation(glProgram, "s_texture");
-
- mGlProgram = glProgram;
- succeeded = true;
- } while (false);
-
- if (!succeeded && (mGlProgram != 0))
- {
- glDeleteProgram(mGlProgram);
- checkGlError("glDeleteProgram");
- mGlProgram = 0;
- }
- return succeeded;
-}
-
-bool WarpRenderer::DrawTexture(GLfloat *affine)
-{
- bool succeeded = false;
- do {
- bool rt = (mFrameBuffer == NULL)?
- SetupGraphics(mSurfaceWidth, mSurfaceHeight) :
- SetupGraphics(mFrameBuffer);
-
- if(!rt)
- break;
-
- glDisable(GL_BLEND);
-
- glActiveTexture(GL_TEXTURE0);
- if (!checkGlError("glActiveTexture")) break;
-
- const GLenum texture_type = InputTextureType();
- glBindTexture(texture_type, mInputTextureName);
- if (!checkGlError("glBindTexture")) break;
-
- // Set the sampler texture unit to 0
- glUniform1i(mSamplerLoc, 0);
-
- // Load the vertex position
- glVertexAttribPointer(mPositionLoc, 4, GL_FLOAT,
- GL_FALSE, VERTEX_STRIDE, g_vVertices);
-
- // Load the texture coordinate
- glVertexAttribPointer(mTexCoordLoc, 2, GL_FLOAT,
- GL_FALSE, VERTEX_STRIDE, &g_vVertices[4]);
-
- glEnableVertexAttribArray(mPositionLoc);
- glEnableVertexAttribArray(mTexCoordLoc);
-
- // pass matrix information to the vertex shader
- glUniformMatrix4fv(mAffinetransLoc, 1, GL_FALSE, affine);
- glUniformMatrix4fv(mViewporttransLoc, 1, GL_FALSE, mViewportMatrix);
- glUniformMatrix4fv(mScalingtransLoc, 1, GL_FALSE, mScalingMatrix);
-
- // And, finally, execute the GL draw command.
- glDrawElements(GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, g_iIndices);
-
- checkGlError("glDrawElements");
-
- glBindFramebuffer(GL_FRAMEBUFFER, 0);
- succeeded = true;
- } while (false);
- return succeeded;
-}
-
-const char* WarpRenderer::VertexShaderSource() const
-{
- static const char gVertexShader[] =
- "uniform mat4 u_affinetrans; \n"
- "uniform mat4 u_viewporttrans; \n"
- "uniform mat4 u_scalingtrans; \n"
- "attribute vec4 a_position; \n"
- "attribute vec2 a_texCoord; \n"
- "varying vec2 v_texCoord; \n"
- "void main() \n"
- "{ \n"
- " gl_Position = u_scalingtrans * u_viewporttrans * u_affinetrans * a_position; \n"
- " v_texCoord = a_texCoord; \n"
- "} \n";
-
- return gVertexShader;
-}
-
-const char* WarpRenderer::FragmentShaderSource() const
-{
- static const char gFragmentShader[] =
- "precision mediump float; \n"
- "varying vec2 v_texCoord; \n"
- "uniform sampler2D s_texture; \n"
- "void main() \n"
- "{ \n"
- " vec4 color; \n"
- " color = texture2D(s_texture, v_texCoord); \n"
- " gl_FragColor = color; \n"
- "} \n";
-
- return gFragmentShader;
-}
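// Illustrative sketch (made-up transform, not produced by the mosaic pipeline):
// DrawTexture() hands its 16 floats to glUniformMatrix4fv() with transpose = GL_FALSE,
// so the affine is column-major and, as in SetViewportMatrix(), elements 12 and 13
// carry the x/y translation.
static void makeTranslationAffine(GLfloat out[16], float tx, float ty)
{
    for (int i = 0; i < 16; i++) out[i] = 0.0f;
    out[0] = out[5] = out[10] = out[15] = 1.0f;  // identity
    out[12] = tx;                                // x translation (4th column)
    out[13] = ty;                                // y translation (4th column)
}
// e.g.: GLfloat affine[16]; makeTranslationAffine(affine, 0.1f, 0.0f);
//       warpRenderer.DrawTexture(affine);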
diff --git a/jni_mosaic/feature_mos/src/mosaic_renderer/WarpRenderer.h b/jni_mosaic/feature_mos/src/mosaic_renderer/WarpRenderer.h
deleted file mode 100755
index 8e9a694ec..000000000
--- a/jni_mosaic/feature_mos/src/mosaic_renderer/WarpRenderer.h
+++ /dev/null
@@ -1,48 +0,0 @@
-#pragma once
-
-#include "FrameBuffer.h"
-#include "Renderer.h"
-
-#include <GLES2/gl2.h>
-
-#include <stdint.h>
-#include <stdio.h>
-#include <stdlib.h>
-
-class WarpRenderer: public Renderer {
- public:
- WarpRenderer();
- virtual ~WarpRenderer();
-
- // Initialize OpenGL resources
- // @return true if successful
- bool InitializeGLProgram();
-
- void SetViewportMatrix(int w, int h, int W, int H);
- void SetScalingMatrix(float xscale, float yscale);
-
- bool DrawTexture(GLfloat *affine);
-
- private:
- // Source code for shaders.
- const char* VertexShaderSource() const;
- const char* FragmentShaderSource() const;
-
- GLuint mTexHandle; // Handle to s_texture.
- GLuint mTexCoordHandle; // Handle to a_texCoord.
- GLuint mTriangleVerticesHandle; // Handle to vPosition.
-
- // Attribute locations
- GLint mPositionLoc;
- GLint mAffinetransLoc;
- GLint mViewporttransLoc;
- GLint mScalingtransLoc;
- GLint mTexCoordLoc;
-
- GLfloat mViewportMatrix[16];
- GLfloat mScalingMatrix[16];
-
- // Sampler location
- GLint mSamplerLoc;
-};
-
diff --git a/jni_mosaic/feature_mos/src/mosaic_renderer/YVURenderer.cpp b/jni_mosaic/feature_mos/src/mosaic_renderer/YVURenderer.cpp
deleted file mode 100755
index f7dcf6f61..000000000
--- a/jni_mosaic/feature_mos/src/mosaic_renderer/YVURenderer.cpp
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "YVURenderer.h"
-
-#include <GLES2/gl2ext.h>
-
-const GLfloat g_vVertices[] = {
- -1.f, 1.f, 0.0f, 1.0f, // Position 0
- 0.0f, 1.0f, // TexCoord 0
- 1.f, 1.f, 0.0f, 1.0f, // Position 1
- 1.0f, 1.0f, // TexCoord 1
- -1.f, -1.f, 0.0f, 1.0f, // Position 2
- 0.0f, 0.0f, // TexCoord 2
- 1.f, -1.f, 0.0f, 1.0f, // Position 3
- 1.0f, 0.0f // TexCoord 3
-};
-
-const int VERTEX_STRIDE = 6 * sizeof(GLfloat);
-
-GLushort g_iIndices3[] = { 0, 1, 2, 3 };
-
-YVURenderer::YVURenderer() : Renderer()
- {
-}
-
-YVURenderer::~YVURenderer() {
-}
-
-bool YVURenderer::InitializeGLProgram()
-{
- bool succeeded = false;
- do {
- GLuint glProgram;
- glProgram = createProgram(VertexShaderSource(),
- FragmentShaderSource());
- if (!glProgram) {
- break;
- }
-
- glUseProgram(glProgram);
- if (!checkGlError("glUseProgram")) break;
-
- // Get attribute locations
- mPositionLoc = glGetAttribLocation(glProgram, "a_Position");
- mTexCoordLoc = glGetAttribLocation(glProgram, "a_texCoord");
-
- // Get sampler location
- mSamplerLoc = glGetUniformLocation(glProgram, "s_texture");
-
- mGlProgram = glProgram;
- succeeded = true;
- } while (false);
-
- if (!succeeded && (mGlProgram != 0))
- {
- glDeleteProgram(mGlProgram);
- checkGlError("glDeleteProgram");
- mGlProgram = 0;
- }
- return succeeded;
-}
-
-bool YVURenderer::DrawTexture()
-{
- bool succeeded = false;
- do {
- bool rt = (mFrameBuffer == NULL)?
- SetupGraphics(mSurfaceWidth, mSurfaceHeight) :
- SetupGraphics(mFrameBuffer);
-
- if(!rt)
- break;
-
- glDisable(GL_BLEND);
-
- glActiveTexture(GL_TEXTURE0);
- if (!checkGlError("glActiveTexture")) break;
-
- const GLenum texture_type = InputTextureType();
- glBindTexture(texture_type, mInputTextureName);
- if (!checkGlError("glBindTexture")) break;
-
- // Set the sampler texture unit to 0
- glUniform1i(mSamplerLoc, 0);
-
- // Load the vertex position
- glVertexAttribPointer(mPositionLoc, 4, GL_FLOAT,
- GL_FALSE, VERTEX_STRIDE, g_vVertices);
-
- // Load the texture coordinate
- glVertexAttribPointer(mTexCoordLoc, 2, GL_FLOAT,
- GL_FALSE, VERTEX_STRIDE, &g_vVertices[4]);
-
- glEnableVertexAttribArray(mPositionLoc);
- glEnableVertexAttribArray(mTexCoordLoc);
-
- // And, finally, execute the GL draw command.
- glDrawElements(GL_TRIANGLE_STRIP, 4, GL_UNSIGNED_SHORT, g_iIndices3);
-
- checkGlError("glDrawElements");
-
- glBindFramebuffer(GL_FRAMEBUFFER, 0);
- succeeded = true;
- } while (false);
- return succeeded;
-}
-
-const char* YVURenderer::VertexShaderSource() const
-{
- // All this really does is copy the coordinates into
- // variables for the fragment shader to pick up.
- static const char gVertexShader[] =
- "attribute vec4 a_Position;\n"
- "attribute vec2 a_texCoord;\n"
- "varying vec2 v_texCoord;\n"
- "void main() {\n"
- " gl_Position = a_Position;\n"
- " v_texCoord = a_texCoord;\n"
- "}\n";
-
- return gVertexShader;
-}
-
-const char* YVURenderer::FragmentShaderSource() const
-{
- static const char gFragmentShader[] =
- "precision mediump float;\n"
- "uniform sampler2D s_texture;\n"
- "const vec4 coeff_y = vec4(0.257, 0.594, 0.098, 0.063);\n"
- "const vec4 coeff_v = vec4(0.439, -0.368, -0.071, 0.500);\n"
- "const vec4 coeff_u = vec4(-0.148, -0.291, 0.439, 0.500);\n"
- "varying vec2 v_texCoord;\n"
- "void main() {\n"
- " vec4 p;\n"
- " p = texture2D(s_texture, v_texCoord);\n"
- " gl_FragColor[0] = dot(p, coeff_y);\n"
- " p = texture2D(s_texture, v_texCoord);\n"
- " gl_FragColor[1] = dot(p, coeff_v);\n"
- " p = texture2D(s_texture, v_texCoord);\n"
- " gl_FragColor[2] = dot(p, coeff_u);\n"
- " p = texture2D(s_texture, v_texCoord);\n"
- " gl_FragColor[3] = dot(p, coeff_y);\n"
- "}\n";
-
- return gFragmentShader;
-}
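// Illustrative sketch (CPU-side restatement, not from the original sources): the
// per-pixel math in the fragment shader above, with the same coefficients. Values are
// normalized to [0,1]; the fourth coefficient multiplies the sampled alpha, taken
// here as 1.0.
static void rgbToYvu(float r, float g, float b, float &y, float &v, float &u)
{
    y =  0.257f * r + 0.594f * g + 0.098f * b + 0.063f;  // dot(p, coeff_y)
    v =  0.439f * r - 0.368f * g - 0.071f * b + 0.500f;  // dot(p, coeff_v)
    u = -0.148f * r - 0.291f * g + 0.439f * b + 0.500f;  // dot(p, coeff_u)
}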
diff --git a/jni_mosaic/feature_mos/src/mosaic_renderer/YVURenderer.h b/jni_mosaic/feature_mos/src/mosaic_renderer/YVURenderer.h
deleted file mode 100755
index d14a4b990..000000000
--- a/jni_mosaic/feature_mos/src/mosaic_renderer/YVURenderer.h
+++ /dev/null
@@ -1,35 +0,0 @@
-#pragma once
-
-#include "FrameBuffer.h"
-#include "Renderer.h"
-
-#include <GLES2/gl2.h>
-
-#include <stdint.h>
-#include <stdio.h>
-#include <stdlib.h>
-
-class YVURenderer: public Renderer {
- public:
- YVURenderer();
- virtual ~YVURenderer();
-
- // Initialize OpenGL resources
- // @return true if successful
- bool InitializeGLProgram();
-
- bool DrawTexture();
-
- private:
- // Source code for shaders.
- const char* VertexShaderSource() const;
- const char* FragmentShaderSource() const;
-
- // Attribute locations
- GLint mPositionLoc;
- GLint mTexCoordLoc;
-
- // Sampler location
- GLint mSamplerLoc;
-};
-
diff --git a/jni_mosaic/feature_stab/src/dbreg/dbreg.cpp b/jni_mosaic/feature_stab/src/dbreg/dbreg.cpp
deleted file mode 100644
index da06aa2ab..000000000
--- a/jni_mosaic/feature_stab/src/dbreg/dbreg.cpp
+++ /dev/null
@@ -1,794 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// $Id: dbreg.cpp,v 1.31 2011/06/17 14:04:32 mbansal Exp $
-#include "dbreg.h"
-#include <string.h>
-#include <stdio.h>
-
-
-#if PROFILE
-#endif
-
-//#include <iostream>
-
-db_FrameToReferenceRegistration::db_FrameToReferenceRegistration() :
- m_initialized(false),m_nr_matches(0),m_over_allocation(256),m_nr_bins(20),m_max_cost_pix(30), m_quarter_resolution(false)
-{
- m_reference_image = NULL;
- m_aligned_ins_image = NULL;
-
- m_quarter_res_image = NULL;
- m_horz_smooth_subsample_image = NULL;
-
- m_x_corners_ref = NULL;
- m_y_corners_ref = NULL;
-
- m_x_corners_ins = NULL;
- m_y_corners_ins = NULL;
-
- m_match_index_ref = NULL;
- m_match_index_ins = NULL;
-
- m_inlier_indices = NULL;
-
- m_num_inlier_indices = 0;
-
- m_temp_double = NULL;
- m_temp_int = NULL;
-
- m_corners_ref = NULL;
- m_corners_ins = NULL;
-
- m_sq_cost = NULL;
- m_cost_histogram = NULL;
-
- profile_string = NULL;
-
- db_Identity3x3(m_K);
- db_Identity3x3(m_H_ref_to_ins);
- db_Identity3x3(m_H_dref_to_ref);
-
- m_sq_cost_computed = false;
- m_reference_set = false;
-
- m_reference_update_period = 0;
- m_nr_frames_processed = 0;
-
- return;
-}
-
-db_FrameToReferenceRegistration::~db_FrameToReferenceRegistration()
-{
- Clean();
-}
-
-void db_FrameToReferenceRegistration::Clean()
-{
- if ( m_reference_image )
- db_FreeImage_u(m_reference_image,m_im_height);
-
- if ( m_aligned_ins_image )
- db_FreeImage_u(m_aligned_ins_image,m_im_height);
-
- if ( m_quarter_res_image )
- {
- db_FreeImage_u(m_quarter_res_image, m_im_height);
- }
-
- if ( m_horz_smooth_subsample_image )
- {
- db_FreeImage_u(m_horz_smooth_subsample_image, m_im_height*2);
- }
-
- delete [] m_x_corners_ref;
- delete [] m_y_corners_ref;
-
- delete [] m_x_corners_ins;
- delete [] m_y_corners_ins;
-
- delete [] m_match_index_ref;
- delete [] m_match_index_ins;
-
- delete [] m_temp_double;
- delete [] m_temp_int;
-
- delete [] m_corners_ref;
- delete [] m_corners_ins;
-
- delete [] m_sq_cost;
- delete [] m_cost_histogram;
-
- delete [] m_inlier_indices;
-
- if(profile_string)
- delete [] profile_string;
-
- m_reference_image = NULL;
- m_aligned_ins_image = NULL;
-
- m_quarter_res_image = NULL;
- m_horz_smooth_subsample_image = NULL;
-
- m_x_corners_ref = NULL;
- m_y_corners_ref = NULL;
-
- m_x_corners_ins = NULL;
- m_y_corners_ins = NULL;
-
- m_match_index_ref = NULL;
- m_match_index_ins = NULL;
-
- m_inlier_indices = NULL;
-
- m_temp_double = NULL;
- m_temp_int = NULL;
-
- m_corners_ref = NULL;
- m_corners_ins = NULL;
-
- m_sq_cost = NULL;
- m_cost_histogram = NULL;
-}
-
-void db_FrameToReferenceRegistration::Init(int width, int height,
- int homography_type,
- int max_iterations,
- bool linear_polish,
- bool quarter_resolution,
- double scale,
- unsigned int reference_update_period,
- bool do_motion_smoothing,
- double motion_smoothing_gain,
- int nr_samples,
- int chunk_size,
- int cd_target_nr_corners,
- double cm_max_disparity,
- bool cm_use_smaller_matching_window,
- int cd_nr_horz_blocks,
- int cd_nr_vert_blocks
- )
-{
- Clean();
-
- m_reference_update_period = reference_update_period;
- m_nr_frames_processed = 0;
-
- m_do_motion_smoothing = do_motion_smoothing;
- m_motion_smoothing_gain = motion_smoothing_gain;
-
- m_stab_smoother.setSmoothingFactor(m_motion_smoothing_gain);
-
- m_quarter_resolution = quarter_resolution;
-
- profile_string = new char[10240];
-
- if (m_quarter_resolution == true)
- {
- width = width/2;
- height = height/2;
-
- m_horz_smooth_subsample_image = db_AllocImage_u(width,height*2,m_over_allocation);
- m_quarter_res_image = db_AllocImage_u(width,height,m_over_allocation);
- }
-
- m_im_width = width;
- m_im_height = height;
-
- double temp[9];
- db_Approx3DCalMat(m_K,temp,m_im_width,m_im_height);
-
- m_homography_type = homography_type;
- m_max_iterations = max_iterations;
- m_scale = 2/(m_K[0]+m_K[4]);
- m_nr_samples = nr_samples;
- m_chunk_size = chunk_size;
-
- double outlier_t1 = 5.0;
-
- m_outlier_t2 = outlier_t1*outlier_t1;//*m_scale*m_scale;
-
- m_current_is_reference = false;
-
- m_linear_polish = linear_polish;
-
- m_reference_image = db_AllocImage_u(m_im_width,m_im_height,m_over_allocation);
- m_aligned_ins_image = db_AllocImage_u(m_im_width,m_im_height,m_over_allocation);
-
- // initialize feature detection and matching:
- //m_max_nr_corners = m_cd.Init(m_im_width,m_im_height,cd_target_nr_corners,cd_nr_horz_blocks,cd_nr_vert_blocks,0.0,0.0);
- m_max_nr_corners = m_cd.Init(m_im_width,m_im_height,cd_target_nr_corners,cd_nr_horz_blocks,cd_nr_vert_blocks,DB_DEFAULT_ABS_CORNER_THRESHOLD/500.0,0.0);
-
- int use_21 = 0;
- m_max_nr_matches = m_cm.Init(m_im_width,m_im_height,cm_max_disparity,m_max_nr_corners,DB_DEFAULT_NO_DISPARITY,cm_use_smaller_matching_window,use_21);
-
- // allocate space for corner feature locations for reference and inspection images:
- m_x_corners_ref = new double [m_max_nr_corners];
- m_y_corners_ref = new double [m_max_nr_corners];
-
- m_x_corners_ins = new double [m_max_nr_corners];
- m_y_corners_ins = new double [m_max_nr_corners];
-
- // allocate space for match indices:
- m_match_index_ref = new int [m_max_nr_matches];
- m_match_index_ins = new int [m_max_nr_matches];
-
- m_temp_double = new double [12*DB_DEFAULT_NR_SAMPLES+10*m_max_nr_matches];
- m_temp_int = new int [db_maxi(DB_DEFAULT_NR_SAMPLES,m_max_nr_matches)];
-
- // allocate space for homogeneous image points:
- m_corners_ref = new double [3*m_max_nr_corners];
- m_corners_ins = new double [3*m_max_nr_corners];
-
- // allocate cost array and histogram:
- m_sq_cost = new double [m_max_nr_matches];
- m_cost_histogram = new int [m_nr_bins];
-
- // reserve array:
- //m_inlier_indices.reserve(m_max_nr_matches);
- m_inlier_indices = new int[m_max_nr_matches];
-
- m_initialized = true;
-
- m_max_inlier_count = 0;
-}
-
-
-#define MB 0
-// Save the reference image, detect features and update the dref-to-ref transformation
-int db_FrameToReferenceRegistration::UpdateReference(const unsigned char * const * im, bool subsample, bool detect_corners)
-{
- double temp[9];
- db_Multiply3x3_3x3(temp,m_H_dref_to_ref,m_H_ref_to_ins);
- db_Copy9(m_H_dref_to_ref,temp);
-
- const unsigned char * const * imptr = im;
-
- if (m_quarter_resolution && subsample)
- {
- GenerateQuarterResImage(im);
- imptr = m_quarter_res_image;
- }
-
- // save the reference image, detect features and quit
- db_CopyImage_u(m_reference_image,imptr,m_im_width,m_im_height,m_over_allocation);
-
- if(detect_corners)
- {
- #if MB
- m_cd.DetectCorners(imptr, m_x_corners_ref,m_y_corners_ref,&m_nr_corners_ref);
- int nr = 0;
- for(int k=0; k<m_nr_corners_ref; k++)
- {
- if(m_x_corners_ref[k]>m_im_width/3)
- {
- m_x_corners_ref[nr] = m_x_corners_ref[k];
- m_y_corners_ref[nr] = m_y_corners_ref[k];
- nr++;
- }
-
- }
- m_nr_corners_ref = nr;
- #else
- m_cd.DetectCorners(imptr, m_x_corners_ref,m_y_corners_ref,&m_nr_corners_ref);
- #endif
- }
- else
- {
- m_nr_corners_ref = m_nr_corners_ins;
-
- for(int k=0; k<m_nr_corners_ins; k++)
- {
- m_x_corners_ref[k] = m_x_corners_ins[k];
- m_y_corners_ref[k] = m_y_corners_ins[k];
- }
-
- }
-
- db_Identity3x3(m_H_ref_to_ins);
-
- m_max_inlier_count = 0; // Reset to 0 as no inliers seen until now
- m_sq_cost_computed = false;
- m_reference_set = true;
- m_current_is_reference = true;
- return 1;
-}
-
-void db_FrameToReferenceRegistration::Get_H_dref_to_ref(double H[9])
-{
- db_Copy9(H,m_H_dref_to_ref);
-}
-
-void db_FrameToReferenceRegistration::Get_H_dref_to_ins(double H[9])
-{
- db_Multiply3x3_3x3(H,m_H_dref_to_ref,m_H_ref_to_ins);
-}
-
-void db_FrameToReferenceRegistration::Set_H_dref_to_ins(double H[9])
-{
- double H_ins_to_ref[9];
-
- db_Identity3x3(H_ins_to_ref); // Ensure it has proper values
- db_InvertAffineTransform(H_ins_to_ref,m_H_ref_to_ins); // Invert to get ins to ref
- db_Multiply3x3_3x3(m_H_dref_to_ref,H,H_ins_to_ref); // Update dref to ref using the input H from dref to ins
-}
-
-
-void db_FrameToReferenceRegistration::ResetDisplayReference()
-{
- db_Identity3x3(m_H_dref_to_ref);
-}
-
-bool db_FrameToReferenceRegistration::NeedReferenceUpdate()
-{
- // If fewer than 50% of the starting number of inliers are left, then it's time to update the reference.
- if(m_max_inlier_count>0 && float(m_num_inlier_indices)/float(m_max_inlier_count)<0.5)
- return true;
- else
- return false;
-}
-
-int db_FrameToReferenceRegistration::AddFrame(const unsigned char * const * im, double H[9],bool force_reference,bool prewarp)
-{
- m_current_is_reference = false;
- if(!m_reference_set || force_reference)
- {
- db_Identity3x3(m_H_ref_to_ins);
- db_Copy9(H,m_H_ref_to_ins);
-
- UpdateReference(im,true,true);
- return 0;
- }
-
- const unsigned char * const * imptr = im;
-
- if (m_quarter_resolution)
- {
- if (m_quarter_res_image)
- {
- GenerateQuarterResImage(im);
- }
-
- imptr = (const unsigned char * const* )m_quarter_res_image;
- }
-
- double H_last[9];
- db_Copy9(H_last,m_H_ref_to_ins);
- db_Identity3x3(m_H_ref_to_ins);
-
- m_sq_cost_computed = false;
-
- // detect corners on the inspection image and match them to the reference image features:
-
- // @jke - Adding code to time the functions. TODO: Remove after test
-#if PROFILE
- double iTimer1, iTimer2;
- char str[255];
- strcpy(profile_string,"\n");
- sprintf(str,"[%dx%d] %p\n",m_im_width,m_im_height,im);
- strcat(profile_string, str);
-#endif
-
- // @jke - Adding code to time the functions. TODO: Remove after test
-#if PROFILE
- iTimer1 = now_ms();
-#endif
- m_cd.DetectCorners(imptr, m_x_corners_ins,m_y_corners_ins,&m_nr_corners_ins);
- // @jke - Adding code to time the functions. TODO: Remove after test
-# if PROFILE
- iTimer2 = now_ms();
- double elapsedTimeCorner = iTimer2 - iTimer1;
- sprintf(str,"Corner Detection [%d corners] = %g ms\n",m_nr_corners_ins, elapsedTimeCorner);
- strcat(profile_string, str);
-#endif
-
- // @jke - Adding code to time the functions. TODO: Remove after test
-#if PROFILE
- iTimer1 = now_ms();
-#endif
- if(prewarp)
- m_cm.Match(m_reference_image,imptr,m_x_corners_ref,m_y_corners_ref,m_nr_corners_ref,
- m_x_corners_ins,m_y_corners_ins,m_nr_corners_ins,
- m_match_index_ref,m_match_index_ins,&m_nr_matches,H,0);
- else
- m_cm.Match(m_reference_image,imptr,m_x_corners_ref,m_y_corners_ref,m_nr_corners_ref,
- m_x_corners_ins,m_y_corners_ins,m_nr_corners_ins,
- m_match_index_ref,m_match_index_ins,&m_nr_matches);
- // @jke - Adding code to time the functions. TODO: Remove after test
-# if PROFILE
- iTimer2 = now_ms();
- double elapsedTimeMatch = iTimer2 - iTimer1;
- sprintf(str,"Matching [%d] = %g ms\n",m_nr_matches,elapsedTimeMatch);
- strcat(profile_string, str);
-#endif
-
-
- // copy out matching features:
- for ( int i = 0; i < m_nr_matches; ++i )
- {
- int offset = 3*i;
- m_corners_ref[offset ] = m_x_corners_ref[m_match_index_ref[i]];
- m_corners_ref[offset+1] = m_y_corners_ref[m_match_index_ref[i]];
- m_corners_ref[offset+2] = 1.0;
-
- m_corners_ins[offset ] = m_x_corners_ins[m_match_index_ins[i]];
- m_corners_ins[offset+1] = m_y_corners_ins[m_match_index_ins[i]];
- m_corners_ins[offset+2] = 1.0;
- }
-
- // @jke - Adding code to time the functions. TODO: Remove after test
-#if PROFILE
- iTimer1 = now_ms();
-#endif
- // perform the alignment:
- db_RobImageHomography(m_H_ref_to_ins, m_corners_ref, m_corners_ins, m_nr_matches, m_K, m_K, m_temp_double, m_temp_int,
- m_homography_type,NULL,m_max_iterations,m_max_nr_matches,m_scale,
- m_nr_samples, m_chunk_size);
- // @jke - Adding code to time the functions. TODO: Remove after test
-# if PROFILE
- iTimer2 = now_ms();
- double elapsedTimeHomography = iTimer2 - iTimer1;
- sprintf(str,"Homography = %g ms\n",elapsedTimeHomography);
- strcat(profile_string, str);
-#endif
-
-
- SetOutlierThreshold();
-
- // Compute the inliers for the homography m_H_ref_to_ins estimated above
- ComputeInliers(m_H_ref_to_ins);
-
- // Update the max inlier count
- m_max_inlier_count = (m_max_inlier_count > m_num_inlier_indices)?m_max_inlier_count:m_num_inlier_indices;
-
- // Fit a least-squares model to just the inliers and put it in m_H_ref_to_ins
- if(m_linear_polish)
- Polish(m_inlier_indices, m_num_inlier_indices);
-
- if (m_quarter_resolution)
- {
- m_H_ref_to_ins[2] *= 2.0;
- m_H_ref_to_ins[5] *= 2.0;
- }
-
-#if PROFILE
- sprintf(str,"#Inliers = %d \n",m_num_inlier_indices);
- strcat(profile_string, str);
-#endif
-/*
- ///// CHECK IF CURRENT TRANSFORMATION GOOD OR BAD ////
- ///// IF BAD, then update reference to the last correctly aligned inspection frame;
- if(m_num_inlier_indices<5)//0.9*m_nr_matches || m_nr_matches < 20)
- {
- db_Copy9(m_H_ref_to_ins,H_last);
- UpdateReference(imptr,false);
-// UpdateReference(m_aligned_ins_image,false);
- }
- else
- {
- ///// IF GOOD, then update the last correctly aligned inspection frame to be this;
- //db_CopyImage_u(m_aligned_ins_image,imptr,m_im_width,m_im_height,m_over_allocation);
-*/
- if(m_do_motion_smoothing)
- SmoothMotion();
-
- // Disable debug printing
- // db_PrintDoubleMatrix(m_H_ref_to_ins,3,3);
-
- db_Copy9(H, m_H_ref_to_ins);
-
- m_nr_frames_processed++;
-{
- if ( (m_nr_frames_processed % m_reference_update_period) == 0 )
- {
- //UpdateReference(imptr,false, false);
-
- #if MB
- UpdateReference(imptr,false, true);
- #else
- UpdateReference(imptr,false, false);
- #endif
- }
-
-
- }
-
-
-
- return 1;
-}
-
-//void db_FrameToReferenceRegistration::ComputeInliers(double H[9],std::vector<int> &inlier_indices)
-void db_FrameToReferenceRegistration::ComputeInliers(double H[9])
-{
- double totnummatches = m_nr_matches;
- int inliercount=0;
-
- m_num_inlier_indices = 0;
-// inlier_indices.clear();
-
- for(int c=0; c < totnummatches; c++ )
- {
- if (m_sq_cost[c] <= m_outlier_t2)
- {
- m_inlier_indices[inliercount] = c;
- inliercount++;
- }
- }
-
- m_num_inlier_indices = inliercount;
- double frac=inliercount/totnummatches;
-}
-
-//void db_FrameToReferenceRegistration::Polish(std::vector<int> &inlier_indices)
-void db_FrameToReferenceRegistration::Polish(int *inlier_indices, int &num_inlier_indices)
-{
- db_Zero(m_polish_C,36);
- db_Zero(m_polish_D,6);
- for (int i=0;i<num_inlier_indices;i++)
- {
- int j = 3*inlier_indices[i];
- m_polish_C[0]+=m_corners_ref[j]*m_corners_ref[j];
- m_polish_C[1]+=m_corners_ref[j]*m_corners_ref[j+1];
- m_polish_C[2]+=m_corners_ref[j];
- m_polish_C[7]+=m_corners_ref[j+1]*m_corners_ref[j+1];
- m_polish_C[8]+=m_corners_ref[j+1];
- m_polish_C[14]+=1;
- m_polish_D[0]+=m_corners_ref[j]*m_corners_ins[j];
- m_polish_D[1]+=m_corners_ref[j+1]*m_corners_ins[j];
- m_polish_D[2]+=m_corners_ins[j];
- m_polish_D[3]+=m_corners_ref[j]*m_corners_ins[j+1];
- m_polish_D[4]+=m_corners_ref[j+1]*m_corners_ins[j+1];
- m_polish_D[5]+=m_corners_ins[j+1];
- }
-
- double a=db_maxd(m_polish_C[0],m_polish_C[7]);
- m_polish_C[0]/=a; m_polish_C[1]/=a; m_polish_C[2]/=a;
- m_polish_C[7]/=a; m_polish_C[8]/=a; m_polish_C[14]/=a;
-
- m_polish_D[0]/=a; m_polish_D[1]/=a; m_polish_D[2]/=a;
- m_polish_D[3]/=a; m_polish_D[4]/=a; m_polish_D[5]/=a;
-
-
- m_polish_C[6]=m_polish_C[1];
- m_polish_C[12]=m_polish_C[2];
- m_polish_C[13]=m_polish_C[8];
-
- m_polish_C[21]=m_polish_C[0]; m_polish_C[22]=m_polish_C[1]; m_polish_C[23]=m_polish_C[2];
- m_polish_C[28]=m_polish_C[7]; m_polish_C[29]=m_polish_C[8];
- m_polish_C[35]=m_polish_C[14];
-
-
- double d[6];
- db_CholeskyDecomp6x6(m_polish_C,d);
- db_CholeskyBacksub6x6(m_H_ref_to_ins,m_polish_C,d,m_polish_D);
-}
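
Polish() above accumulates the normal equations of a per-row affine least-squares fit over the RANSAC inliers and solves the stacked 6x6 system with a Cholesky decomposition. The sketch below is illustrative only (not part of the deleted sources): it shows the accumulation for the x row with hypothetical correspondences and omits the normalization by the largest diagonal entry that Polish() applies before solving.

#include <cstdio>

int main() {
    // Three hypothetical inlier correspondences (xr, yr) -> xi for the x row.
    const double xr[3] = {10.0, 40.0, 25.0};
    const double yr[3] = {12.0,  8.0, 30.0};
    const double xi[3] = {11.0, 42.0, 27.0};

    // Upper triangle of A^T A with rows A_k = [xr_k  yr_k  1], and A^T * xi.
    double C[6] = {0.0};  // {sum xr*xr, sum xr*yr, sum xr, sum yr*yr, sum yr, n}
    double D[3] = {0.0};
    for (int k = 0; k < 3; ++k) {
        C[0] += xr[k]*xr[k]; C[1] += xr[k]*yr[k]; C[2] += xr[k];
        C[3] += yr[k]*yr[k]; C[4] += yr[k];       C[5] += 1.0;
        D[0] += xr[k]*xi[k]; D[1] += yr[k]*xi[k]; D[2] += xi[k];
    }
    // Solving A^T A * [a b c]^T = D gives the x row of the affine transform;
    // Polish() stacks the y row as well and solves the 6x6 system via Cholesky.
    std::printf("A^T A = [%g %g %g; %g %g %g; %g %g %g]\n",
                C[0], C[1], C[2], C[1], C[3], C[4], C[2], C[4], C[5]);
    std::printf("A^T x = [%g %g %g]\n", D[0], D[1], D[2]);
    return 0;
}
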
-
-void db_FrameToReferenceRegistration::EstimateSecondaryModel(double H[9])
-{
- /* if ( m_current_is_reference )
- {
- db_Identity3x3(H);
- return;
- }
- */
-
- // select the outliers of the current model:
- SelectOutliers();
-
- // perform the alignment:
- db_RobImageHomography(m_H_ref_to_ins, m_corners_ref, m_corners_ins, m_nr_matches, m_K, m_K, m_temp_double, m_temp_int,
- m_homography_type,NULL,m_max_iterations,m_max_nr_matches,m_scale,
- m_nr_samples, m_chunk_size);
-
- db_Copy9(H,m_H_ref_to_ins);
-}
-
-void db_FrameToReferenceRegistration::ComputeCostArray()
-{
- if ( m_sq_cost_computed ) return;
-
- for( int c=0, k=0 ;c < m_nr_matches; c++, k=k+3)
- {
- m_sq_cost[c] = SquaredInhomogenousHomographyError(m_corners_ins+k,m_H_ref_to_ins,m_corners_ref+k);
- }
-
- m_sq_cost_computed = true;
-}
-
-void db_FrameToReferenceRegistration::SelectOutliers()
-{
- int nr_outliers=0;
-
- ComputeCostArray();
-
- for(int c=0, k=0 ;c<m_nr_matches;c++,k=k+3)
- {
- if (m_sq_cost[c] > m_outlier_t2)
- {
- int offset = 3*nr_outliers++;
- db_Copy3(m_corners_ref+offset,m_corners_ref+k);
- db_Copy3(m_corners_ins+offset,m_corners_ins+k);
- }
- }
-
- m_nr_matches = nr_outliers;
-}
-
-void db_FrameToReferenceRegistration::ComputeCostHistogram()
-{
- ComputeCostArray();
-
- for ( int b = 0; b < m_nr_bins; ++b )
- m_cost_histogram[b] = 0;
-
- for(int c = 0; c < m_nr_matches; c++)
- {
- double error = db_SafeSqrt(m_sq_cost[c]);
- int bin = (int)(error/m_max_cost_pix*m_nr_bins);
- if ( bin < m_nr_bins )
- m_cost_histogram[bin]++;
- else
- m_cost_histogram[m_nr_bins-1]++;
- }
-
-/*
- for ( int i = 0; i < m_nr_bins; ++i )
- std::cout << m_cost_histogram[i] << " ";
- std::cout << std::endl;
-*/
-}
-
-void db_FrameToReferenceRegistration::SetOutlierThreshold()
-{
- ComputeCostHistogram();
-
- int i = 0, last=0;
- for (; i < m_nr_bins-1; ++i )
- {
- if ( last > m_cost_histogram[i] )
- break;
- last = m_cost_histogram[i];
- }
-
- //std::cout << "I " << i << std::endl;
-
- int max = m_cost_histogram[i];
-
- for (; i < m_nr_bins-1; ++i )
- {
- if ( m_cost_histogram[i] < (int)(0.1*max) )
- //if ( last < m_cost_histogram[i] )
- break;
- last = m_cost_histogram[i];
- }
- //std::cout << "J " << i << std::endl;
-
- m_outlier_t2 = db_sqr(i*m_max_cost_pix/m_nr_bins);
-
- //std::cout << "m_outlier_t2 " << m_outlier_t2 << std::endl;
-}
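
SetOutlierThreshold() above derives the squared-error threshold from the cost histogram: it walks up to the first decreasing bin, keeps going until a bin count drops below 10% of that value, and converts the stopping bin index back to a pixel error. The following self-contained sketch applies the same rule to hypothetical histogram counts; it is illustrative only and not part of the deleted sources.

#include <cstdio>

int main() {
    const int nr_bins = 20;            // m_nr_bins in the class above
    const double max_cost_pix = 30.0;  // m_max_cost_pix
    const int hist[nr_bins] = {2, 9, 30, 55, 40, 22, 9, 4, 2, 1,
                               1, 0, 0, 0, 0, 0, 0, 0, 0, 0};

    int i = 0, last = 0;
    for (; i < nr_bins - 1; ++i) {     // walk up to the first decreasing bin
        if (last > hist[i]) break;
        last = hist[i];
    }
    const int peak = hist[i];
    for (; i < nr_bins - 1; ++i) {     // continue until the count drops below 10% of it
        if (hist[i] < (int)(0.1 * peak)) break;
    }
    const double t = i * max_cost_pix / nr_bins;
    std::printf("outlier threshold ~ %.1f px (t^2 = %.1f)\n", t, t * t);
    return 0;
}
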
-
-void db_FrameToReferenceRegistration::SmoothMotion(void)
-{
- VP_MOTION inmot,outmot;
-
- double H[9];
-
- Get_H_dref_to_ins(H);
-
- MXX(inmot) = H[0];
- MXY(inmot) = H[1];
- MXZ(inmot) = H[2];
- MXW(inmot) = 0.0;
-
- MYX(inmot) = H[3];
- MYY(inmot) = H[4];
- MYZ(inmot) = H[5];
- MYW(inmot) = 0.0;
-
- MZX(inmot) = H[6];
- MZY(inmot) = H[7];
- MZZ(inmot) = H[8];
- MZW(inmot) = 0.0;
-
- MWX(inmot) = 0.0;
- MWY(inmot) = 0.0;
- MWZ(inmot) = 0.0;
- MWW(inmot) = 1.0;
-
- inmot.type = VP_MOTION_AFFINE;
-
- int w = m_im_width;
- int h = m_im_height;
-
- if(m_quarter_resolution)
- {
- w = w*2;
- h = h*2;
- }
-
-#if 0
- m_stab_smoother.smoothMotionAdaptive(w,h,&inmot,&outmot);
-#else
- m_stab_smoother.smoothMotion(&inmot,&outmot);
-#endif
-
- H[0] = MXX(outmot);
- H[1] = MXY(outmot);
- H[2] = MXZ(outmot);
-
- H[3] = MYX(outmot);
- H[4] = MYY(outmot);
- H[5] = MYZ(outmot);
-
- H[6] = MZX(outmot);
- H[7] = MZY(outmot);
- H[8] = MZZ(outmot);
-
- Set_H_dref_to_ins(H);
-}
-
-void db_FrameToReferenceRegistration::GenerateQuarterResImage(const unsigned char* const* im)
-{
- int input_h = m_im_height*2;
- int input_w = m_im_width*2;
-
- for (int j = 0; j < input_h; j++)
- {
- const unsigned char* in_row_ptr = im[j];
- unsigned char* out_row_ptr = m_horz_smooth_subsample_image[j]+1;
-
- for (int i = 2; i < input_w-2; i += 2)
- {
- int smooth_val = (
- 6*in_row_ptr[i] +
- ((in_row_ptr[i-1]+in_row_ptr[i+1])<<2) +
- in_row_ptr[i-2]+in_row_ptr[i+2]
- ) >> 4;
- *out_row_ptr++ = (unsigned char) smooth_val;
-
- if ( (smooth_val < 0) || (smooth_val > 255))
- {
- return;
- }
-
- }
- }
-
- for (int j = 2; j < input_h-2; j+=2)
- {
-
- unsigned char* in_row_ptr = m_horz_smooth_subsample_image[j];
- unsigned char* out_row_ptr = m_quarter_res_image[j/2];
-
- for (int i = 1; i < m_im_width-1; i++)
- {
- int smooth_val = (
- 6*in_row_ptr[i] +
- ((in_row_ptr[i-m_im_width]+in_row_ptr[i+m_im_width]) << 2)+
- in_row_ptr[i-2*m_im_width]+in_row_ptr[i+2*m_im_width]
- ) >> 4;
- *out_row_ptr++ = (unsigned char)smooth_val;
-
- if ( (smooth_val < 0) || (smooth_val > 255))
- {
- return;
- }
-
- }
- }
-}
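
GenerateQuarterResImage() above smooths with a [1 4 6 4 1]/16 binomial kernel while decimating by two, first along rows and then along columns. The one-dimensional sketch below shows that smoothing-and-decimation step on hypothetical samples; it is illustrative only and not part of the deleted sources.

#include <cstdio>

int main() {
    // Hypothetical row of input samples.
    const unsigned char in[10] = {10, 12, 200, 14, 16, 18, 240, 22, 24, 26};
    unsigned char out[4];

    // Replace every second sample by a [1 4 6 4 1]/16 weighted average, as in
    // the horizontal and vertical passes of GenerateQuarterResImage().
    for (int i = 2, o = 0; i < 10 - 2; i += 2, ++o) {
        const int smooth_val = (6 * in[i] +
                                ((in[i - 1] + in[i + 1]) << 2) +
                                in[i - 2] + in[i + 2]) >> 4;
        out[o] = (unsigned char)smooth_val;
        std::printf("out[%d] = %d\n", o, smooth_val);
    }
    return 0;
}
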
diff --git a/jni_mosaic/feature_stab/src/dbreg/dbreg.h b/jni_mosaic/feature_stab/src/dbreg/dbreg.h
deleted file mode 100644
index 4eb244481..000000000
--- a/jni_mosaic/feature_stab/src/dbreg/dbreg.h
+++ /dev/null
@@ -1,581 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-#pragma once
-
-#ifdef _WIN32
-#ifdef DBREG_EXPORTS
-#define DBREG_API __declspec(dllexport)
-#else
-#define DBREG_API __declspec(dllimport)
-#endif
-#else
-#define DBREG_API
-#endif
-
-// @jke - the next few lines are for extracting timing data. TODO: Remove after test
-#define PROFILE 0
-
-#include "dbstabsmooth.h"
-
-#include <db_feature_detection.h>
-#include <db_feature_matching.h>
-#include <db_rob_image_homography.h>
-
-#if PROFILE
- #include <sys/time.h>
-#endif
-
-/*! \mainpage db_FrameToReferenceRegistration
-
- \section intro Introduction
-
- db_FrameToReferenceRegistration provides a simple interface to a set of sophisticated algorithms for stabilizing
- video sequences. As its name suggests, the class is used to compute parameters that will allow us to warp incoming video
- frames and register them with respect to a so-called <i>reference</i> frame. The reference frame is simply the first
- frame of a sequence; the registration process is that of estimating the parameters of a warp that can be applied to
- subsequent frames to make those frames align with the reference. A video made up of these warped frames will be more
- stable than the input video.
-
- For more technical information on the internal structure of the algorithms used within the db_FrameToReferenceRegistration class,
- please follow this <a href="../Sarnoff image registration.docx">link</a>.
-
- \section usage Usage
- In addition to the class constructor, there are two main functions of db_FrameToReferenceRegistration that are of
- interest to the programmer. db_FrameToReferenceRegistration::Init(...) is used to initialize the parameters of the
- registration algorithm. db_FrameToReferenceRegistration::AddFrame(...) is the method by which each new video frame
- is introduced to the registration algorithm, and produces the estimated registration warp parameters.
-
- The following example illustrates how the major methods of the class db_FrameToReferenceRegistration can be used together
- to calculate the registration parameters for an image sequence. In the example, the calls to the methods of
- db_FrameToReferenceRegistration match those found in the API, but supporting code should be considered pseudo-code.
- For a more complete example, please consult the source code for dbregtest.
-
-
- \code
- // feature-based image registration class:
- db_FrameToReferenceRegistration reg;
-
- // Image data
- const unsigned char * const * image_storage;
-
- // The 3x3 frame to reference registration parameters
- double frame_to_ref_homography[9];
-
- // a counter to count the number of frames processed.
- unsigned long frame_counter;
- // ...
-
- // main loop - keep going while there are images to process.
- while (ImagesAreAvailable)
- {
- // Call functions to place latest data into image_storage
- // ...
-
- // if the registration object is not yet initialized, then do so
- // The arguments to this function are explained in the accompanying
- // html API documentation
- if (!reg.Initialized())
- {
- reg.Init(w,h,motion_model_type,25,linear_polish,quarter_resolution,
- DB_POINT_STANDARDDEV,reference_update_period,
- do_motion_smoothing,motion_smoothing_gain,
- DB_DEFAULT_NR_SAMPLES,DB_DEFAULT_CHUNK_SIZE,
- nr_corners,max_disparity);
- }
-
- // Present the new image data to the registration algorithm,
- // with the result being stored in the frame_to_ref_homography
- // variable.
- reg.AddFrame(image_storage,frame_to_ref_homography);
-
- // frame_to_ref_homography now contains the stabilizing transform
- // use this to warp the latest image for display, etc.
-
- // if this is the first frame, we need to tell the registration
- // class to store the image as its reference. Otherwise, AddFrame
- // takes care of that.
- if (frame_counter == 0)
- {
- reg.UpdateReference(image_storage);
- }
-
- // increment the frame counter
- frame_counter++;
- }
-
- \endcode
-
- */
-
-/*!
- * Performs feature-based frame to reference image registration.
- */
-class DBREG_API db_FrameToReferenceRegistration
-{
-public:
- db_FrameToReferenceRegistration(void);
- ~db_FrameToReferenceRegistration();
-
- /*!
- * Set parameters and allocate memory. Note: The default values of these parameters have been set to the values used for the Android implementation (i.e. the demo APK).
- * \param width image width
- * \param height image height
- * \param homography_type see definitions in \ref LMRobImageHomography
- * \param max_iterations max number of polishing steps
- * \param linear_polish whether to perform a linear polishing step after RANSAC
- * \param quarter_resolution whether to process input images at quarter resolution (for computational efficiency)
- * \param scale Cauchy scale coefficient (see db_ExpCauchyReprojectionError() )
- * \param reference_update_period how often to update the alignment reference (in units of number of frames)
- * \param do_motion_smoothing whether to perform display reference smoothing
- * \param motion_smoothing_gain weight factor to reflect how fast the display reference must follow the current frame if motion smoothing is enabled
- * \param nr_samples number of times to compute a hypothesis
- * \param chunk_size size of cost chunks
- * \param cd_target_nr_corners target number of corners for corner detector
- * \param cm_max_disparity maximum disparity search range for corner matcher (in units of ratio of image width)
- * \param cm_use_smaller_matching_window if set to true, uses a correlation window of 5x5 instead of the default 11x11
- * \param cd_nr_horz_blocks the number of horizontal blocks for the corner detector to partition the image
- * \param cd_nr_vert_blocks the number of vertical blocks for the corner detector to partition the image
- */
- void Init(int width, int height,
- int homography_type = DB_HOMOGRAPHY_TYPE_DEFAULT,
- int max_iterations = DB_DEFAULT_MAX_ITERATIONS,
- bool linear_polish = false,
- bool quarter_resolution = true,
- double scale = DB_POINT_STANDARDDEV,
- unsigned int reference_update_period = 3,
- bool do_motion_smoothing = false,
- double motion_smoothing_gain = 0.75,
- int nr_samples = DB_DEFAULT_NR_SAMPLES,
- int chunk_size = DB_DEFAULT_CHUNK_SIZE,
- int cd_target_nr_corners = 500,
- double cm_max_disparity = 0.2,
- bool cm_use_smaller_matching_window = false,
- int cd_nr_horz_blocks = 5,
- int cd_nr_vert_blocks = 5);
-
- /*!
- * Reset the transformation type that is being use to perform alignment. Use this to change the alignment type at run time.
- * \param homography_type the type of transformation to use for performing alignment (see definitions in \ref LMRobImageHomography)
- */
- void ResetHomographyType(int homography_type) { m_homography_type = homography_type; }
-
- /*!
- * Enable/Disable motion smoothing. Use this to turn motion smoothing on/off at run time.
- * \param enable flag indicating whether to turn the motion smoothing on or off.
- */
- void ResetSmoothing(bool enable) { m_do_motion_smoothing = enable; }
-
- /*!
- * Align an inspection image to an existing reference image, update the reference image if due and perform motion smoothing if enabled.
- * \param im new inspection image
- * \param H computed transformation from reference to inspection coordinate frame. Identity is returned if no reference frame was set.
- * \param force_reference make this the new reference image
- * \param prewarp if true, use the supplied H as an initial estimate when matching corners between the reference and inspection images
- */
- int AddFrame(const unsigned char * const * im, double H[9], bool force_reference=false, bool prewarp=false);
-
- /*!
- * Returns true if Init() was run.
- */
- bool Initialized() const { return m_initialized; }
-
- /*!
- * Returns true if the current frame is being used as the alignment reference.
- */
- bool IsCurrentReference() const { return m_current_is_reference; }
-
- /*!
- * Returns true if we need to call UpdateReference now.
- */
- bool NeedReferenceUpdate();
-
- /*!
- * Returns the pointer reference to the alignment reference image data
- */
- unsigned char ** GetReferenceImage() { return m_reference_image; }
-
- /*!
- * Returns the pointer reference to the double array containing the homogeneous coordinates for the matched reference image corners.
- */
- double * GetRefCorners() { return m_corners_ref; }
- /*!
- * Returns the pointer reference to the double array containing the homogeneous coordinates for the matched inspection image corners.
- */
- double * GetInsCorners() { return m_corners_ins; }
- /*!
- * Returns the number of correspondences between the reference and inspection images.
- */
- int GetNrMatches() { return m_nr_matches; }
-
- /*!
- * Returns the number of corners detected in the current reference image.
- */
- int GetNrRefCorners() { return m_nr_corners_ref; }
-
- /*!
- * Returns the pointer to an array of indices that were found to be RANSAC inliers from the matched corner lists.
- */
- int* GetInliers() { return m_inlier_indices; }
-
- /*!
- * Returns the number of inliers from the RANSAC matching step.
- */
- int GetNrInliers() { return m_num_inlier_indices; }
-
- //std::vector<int>& GetInliers();
- //void Polish(std::vector<int> &inlier_indices);
-
- /*!
- * Perform a linear polishing step by re-estimating the alignment transformation using the RANSAC inliers.
- * \param inlier_indices pointer to an array of indices that were found to be RANSAC inliers from the matched corner lists.
- * \param num_inlier_indices number of inliers i.e. the length of the array passed as the first argument.
- */
- void Polish(int *inlier_indices, int &num_inlier_indices);
-
- /*!
- * Reset the motion smoothing parameters to their initial values.
- */
- void ResetMotionSmoothingParameters() { m_stab_smoother.Init(); }
-
- /*!
- * Update the alignment reference image to the specified image.
- * \param im pointer to the image data to be used as the new alignment reference.
- * \param subsample boolean flag to control whether the function should internally subsample the provided image to the size provided in the Init() function.
- * \param detect_corners whether to run corner detection on the new reference image; if false, the corners found in the last inspection image are reused.
- */
- int UpdateReference(const unsigned char * const * im, bool subsample = true, bool detect_corners = true);
-
- /*!
- * Returns the transformation from the display reference to the alignment reference frame
- */
- void Get_H_dref_to_ref(double H[9]);
- /*!
- * Returns the transformation from the display reference to the inspection reference frame
- */
- void Get_H_dref_to_ins(double H[9]);
- /*!
- * Set the transformation from the display reference to the inspection reference frame
- * \param H the transformation to set
- */
- void Set_H_dref_to_ins(double H[9]);
-
- /*!
- * Reset the display reference to the current frame.
- */
- void ResetDisplayReference();
-
- /*!
- * Estimate a secondary motion model starting from the specified transformation.
- * \param H the primary motion model to start from
- */
- void EstimateSecondaryModel(double H[9]);
-
- /*!
- *
- */
- void SelectOutliers();
-
- char *profile_string;
-
-protected:
- void Clean();
- void GenerateQuarterResImage(const unsigned char* const * im);
-
- int m_im_width;
- int m_im_height;
-
- // RANSAC and refinement parameters:
- int m_homography_type;
- int m_max_iterations;
- double m_scale;
- int m_nr_samples;
- int m_chunk_size;
- double m_outlier_t2;
-
- // Whether to fit a linear model to just the inliers at the end
- bool m_linear_polish;
- double m_polish_C[36];
- double m_polish_D[6];
-
- // local state
- bool m_current_is_reference;
- bool m_initialized;
-
- // inspection to reference homography:
- double m_H_ref_to_ins[9];
- double m_H_dref_to_ref[9];
-
- // feature extraction and matching:
- db_CornerDetector_u m_cd;
- db_Matcher_u m_cm;
-
- // length of corner arrays:
- unsigned long m_max_nr_corners;
-
- // corner locations of reference image features:
- double * m_x_corners_ref;
- double * m_y_corners_ref;
- int m_nr_corners_ref;
-
- // corner locations of inspection image features:
- double * m_x_corners_ins;
- double * m_y_corners_ins;
- int m_nr_corners_ins;
-
- // length of match index arrays:
- unsigned long m_max_nr_matches;
-
- // match indices:
- int * m_match_index_ref;
- int * m_match_index_ins;
- int m_nr_matches;
-
- // pointer to internal copy of the reference image:
- unsigned char ** m_reference_image;
-
- // pointer to internal copy of last aligned inspection image:
- unsigned char ** m_aligned_ins_image;
-
- // pointer to quarter resolution image, if used.
- unsigned char** m_quarter_res_image;
-
- // temporary storage for the quarter resolution image processing
- unsigned char** m_horz_smooth_subsample_image;
-
- // temporary space for homography computation:
- double * m_temp_double;
- int * m_temp_int;
-
- // homogeneous image point arrays:
- double * m_corners_ref;
- double * m_corners_ins;
-
- // Indices of the points within the match lists
- int * m_inlier_indices;
- int m_num_inlier_indices;
-
- //void ComputeInliers(double H[9], std::vector<int> &inlier_indices);
- void ComputeInliers(double H[9]);
-
- // cost arrays:
- void ComputeCostArray();
- bool m_sq_cost_computed;
- double * m_sq_cost;
-
- // cost histogram:
- void ComputeCostHistogram();
- int *m_cost_histogram;
-
- void SetOutlierThreshold();
-
- // utility function for smoothing the motion parameters.
- void SmoothMotion(void);
-
-private:
- double m_K[9];
- const int m_over_allocation;
-
- bool m_reference_set;
-
- // Maximum number of inliers seen until now w.r.t the current reference frame
- int m_max_inlier_count;
-
- // Number of cost histogram bins:
- int m_nr_bins;
- // All costs above this threshold get put into the last bin:
- int m_max_cost_pix;
-
- // whether to quarter the image resolution for processing, or not
- bool m_quarter_resolution;
-
- // the period (in number of frames) for reference update.
- unsigned int m_reference_update_period;
-
- // the number of frames processed so far.
- unsigned int m_nr_frames_processed;
-
- // smoother for motion transformations
- db_StabilizationSmoother m_stab_smoother;
-
- // boolean to control whether motion smoothing occurs (or not)
- bool m_do_motion_smoothing;
-
- // double to set the gain for motion smoothing
- double m_motion_smoothing_gain;
-};
-/*!
- Create look-up tables to undistort images. Only Bouguet (Matlab toolkit)
- is currently supported. Can be used with db_WarpImageLut_u().
- \code
- xd = H*xs;
- xd = xd/xd(3);
- \endcode
- \param lut_x pre-allocated float image
- \param lut_y pre-allocated float image
- \param w width
- \param h height
- \param H image homography from source to destination
- */
-inline void db_GenerateHomographyLut(float ** lut_x,float ** lut_y,int w,int h,const double H[9])
-{
- assert(lut_x && lut_y);
- double x[3] = {0.0,0.0,1.0};
- double xb[3];
-
-/*
- double xl[3];
-
- // Determine the output coordinate system ROI
- double Hinv[9];
- db_InvertAffineTransform(Hinv,H);
- db_Multiply3x3_3x1(xl, Hinv, x);
- xl[0] = db_SafeDivision(xl[0],xl[2]);
- xl[1] = db_SafeDivision(xl[1],xl[2]);
-*/
-
- for ( int i = 0; i < w; ++i )
- for ( int j = 0; j < h; ++j )
- {
- x[0] = double(i);
- x[1] = double(j);
- db_Multiply3x3_3x1(xb, H, x);
- xb[0] = db_SafeDivision(xb[0],xb[2]);
- xb[1] = db_SafeDivision(xb[1],xb[2]);
-
- lut_x[j][i] = float(xb[0]);
- lut_y[j][i] = float(xb[1]);
- }
-}
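
db_GenerateHomographyLut() above stores, for every destination pixel (i, j), the source coordinate obtained by applying H and dividing by the homogeneous component; the db_WarpImageLut*_rgb() helpers below then sample the source image at those coordinates. The sketch below fills a tiny LUT for a hypothetical translation homography without using the db_* helpers; it is illustrative only and not part of the deleted sources.

#include <cstdio>

int main() {
    // Hypothetical homography: pure translation by (+3.5, -2.0).
    const double H[9] = {1.0, 0.0,  3.5,
                         0.0, 1.0, -2.0,
                         0.0, 0.0,  1.0};
    const int w = 4, h = 3;
    float lut_x[h][w], lut_y[h][w];

    for (int j = 0; j < h; ++j)
        for (int i = 0; i < w; ++i) {
            const double xb0 = H[0]*i + H[1]*j + H[2];
            const double xb1 = H[3]*i + H[4]*j + H[5];
            const double xb2 = H[6]*i + H[7]*j + H[8];
            lut_x[j][i] = (float)(xb0 / xb2);   // source x for destination (i, j)
            lut_y[j][i] = (float)(xb1 / xb2);   // source y for destination (i, j)
        }

    // A warp then reads the source image at (lut_x[j][i], lut_y[j][i]) for each
    // destination pixel, as the db_WarpImageLut*_rgb() functions below do.
    std::printf("dst (1,1) samples src (%.1f, %.1f)\n", lut_x[1][1], lut_y[1][1]);
    return 0;
}
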
-
-/*!
- * Perform a look-up table warp for packed RGB ([rgbrgbrgb...]) images.
- * The LUTs must be float images of the same size as source image.
- * The source value x_s is determined from destination (x_d,y_d) through lut_x
- * and y_s is determined from lut_y:
- \code
- x_s = lut_x[y_d][x_d];
- y_s = lut_y[y_d][x_d];
- \endcode
-
- * \param src source image (w*3 by h)
- * \param dst destination image (w*3 by h)
- * \param w width
- * \param h height
- * \param lut_x LUT for x
- * \param lut_y LUT for y
- */
-inline void db_WarpImageLutFast_rgb(const unsigned char * const * src, unsigned char ** dst, int w, int h,
- const float * const * lut_x, const float * const * lut_y)
-{
- assert(src && dst);
- int xd=0, yd=0;
-
- for ( int i = 0; i < w; ++i )
- for ( int j = 0; j < h; ++j )
- {
- xd = static_cast<unsigned int>(lut_x[j][i]);
- yd = static_cast<unsigned int>(lut_y[j][i]);
- if ( xd >= w || yd >= h ||
- xd < 0 || yd < 0)
- {
- dst[j][3*i ] = 0;
- dst[j][3*i+1] = 0;
- dst[j][3*i+2] = 0;
- }
- else
- {
- dst[j][3*i ] = src[yd][3*xd ];
- dst[j][3*i+1] = src[yd][3*xd+1];
- dst[j][3*i+2] = src[yd][3*xd+2];
- }
- }
-}
-
-inline unsigned char db_BilinearInterpolationRGB(double y, double x, const unsigned char * const * v, int offset)
-{
- int floor_x=(int) x;
- int floor_y=(int) y;
-
- int ceil_x=floor_x+1;
- int ceil_y=floor_y+1;
-
- unsigned char f00 = v[floor_y][3*floor_x+offset];
- unsigned char f01 = v[floor_y][3*ceil_x+offset];
- unsigned char f10 = v[ceil_y][3*floor_x+offset];
- unsigned char f11 = v[ceil_y][3*ceil_x+offset];
-
- double xl = x-floor_x;
- double yl = y-floor_y;
-
- return (unsigned char)(f00*(1-yl)*(1-xl) + f10*yl*(1-xl) + f01*(1-yl)*xl + f11*yl*xl);
-}
-
-inline void db_WarpImageLutBilinear_rgb(const unsigned char * const * src, unsigned char ** dst, int w, int h,
- const float * const * lut_x, const float * const * lut_y)
-{
- assert(src && dst);
- double xd=0.0, yd=0.0;
-
- for ( int i = 0; i < w; ++i )
- for ( int j = 0; j < h; ++j )
- {
- xd = static_cast<double>(lut_x[j][i]);
- yd = static_cast<double>(lut_y[j][i]);
- if ( xd > w-2 || yd > h-2 ||
- xd < 0.0 || yd < 0.0)
- {
- dst[j][3*i ] = 0;
- dst[j][3*i+1] = 0;
- dst[j][3*i+2] = 0;
- }
- else
- {
- dst[j][3*i ] = db_BilinearInterpolationRGB(yd,xd,src,0);
- dst[j][3*i+1] = db_BilinearInterpolationRGB(yd,xd,src,1);
- dst[j][3*i+2] = db_BilinearInterpolationRGB(yd,xd,src,2);
- }
- }
-}
-
-inline double SquaredInhomogenousHomographyError(double y[3],double H[9],double x[3]){
- double x0,x1,x2,mult;
- double sd;
-
- x0=H[0]*x[0]+H[1]*x[1]+H[2];
- x1=H[3]*x[0]+H[4]*x[1]+H[5];
- x2=H[6]*x[0]+H[7]*x[1]+H[8];
- mult=1.0/((x2!=0.0)?x2:1.0);
- sd=(y[0]-x0*mult)*(y[0]-x0*mult)+(y[1]-x1*mult)*(y[1]-x1*mult);
-
- return(sd);
-}
-
-
-// functions related to profiling
-#if PROFILE
-
-/* return current time in milliseconds */
-static double
-now_ms(void)
-{
- //struct timespec res;
- struct timeval res;
- //clock_gettime(CLOCK_REALTIME, &res);
- gettimeofday(&res, NULL);
- return 1000.0*res.tv_sec + (double)res.tv_usec/1e3;
-}
-
-#endif
diff --git a/jni_mosaic/feature_stab/src/dbreg/dbstabsmooth.cpp b/jni_mosaic/feature_stab/src/dbreg/dbstabsmooth.cpp
deleted file mode 100644
index dffff8ab1..000000000
--- a/jni_mosaic/feature_stab/src/dbreg/dbstabsmooth.cpp
+++ /dev/null
@@ -1,330 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <stdlib.h>
-#include "dbstabsmooth.h"
-
-///// TODO TODO ////////// Replace this with the actual definition from Jayan's reply /////////////
-#define vp_copy_motion_no_id vp_copy_motion
-///////////////////////////////////////////////////////////////////////////////////////////////////
-
-static bool vpmotion_add(VP_MOTION *in1, VP_MOTION *in2, VP_MOTION *out);
-static bool vpmotion_multiply(VP_MOTION *in1, double factor, VP_MOTION *out);
-
-db_StabilizationSmoother::db_StabilizationSmoother()
-{
- Init();
-}
-
-void db_StabilizationSmoother::Init()
-{
- f_smoothOn = true;
- f_smoothReset = false;
- f_smoothFactor = 1.0f;
- f_minDampingFactor = 0.2f;
- f_zoom = 1.0f;
- VP_MOTION_ID(f_motLF);
- VP_MOTION_ID(f_imotLF);
- f_hsize = 0;
- f_vsize = 0;
-
- VP_MOTION_ID(f_disp_mot);
- VP_MOTION_ID(f_src_mot);
- VP_MOTION_ID(f_diff_avg);
-
- for( int i = 0; i < MOTION_ARRAY-1; i++) {
- VP_MOTION_ID(f_hist_mot_speed[i]);
- VP_MOTION_ID(f_hist_mot[i]);
- VP_MOTION_ID(f_hist_diff_mot[i]);
- }
- VP_MOTION_ID(f_hist_mot[MOTION_ARRAY-1]);
-
-}
-
-db_StabilizationSmoother::~db_StabilizationSmoother()
-{}
-
-
-bool db_StabilizationSmoother::smoothMotion(VP_MOTION *inmot, VP_MOTION *outmot)
-{
- VP_MOTION_ID(f_motLF);
- VP_MOTION_ID(f_imotLF);
- f_motLF.insid = inmot->refid;
- f_motLF.refid = inmot->insid;
-
- if(f_smoothOn) {
- if(!f_smoothReset) {
- MXX(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MXX(f_motLF) + (1.0-f_smoothFactor)* (double) MXX(*inmot));
- MXY(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MXY(f_motLF) + (1.0-f_smoothFactor)* (double) MXY(*inmot));
- MXZ(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MXZ(f_motLF) + (1.0-f_smoothFactor)* (double) MXZ(*inmot));
- MXW(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MXW(f_motLF) + (1.0-f_smoothFactor)* (double) MXW(*inmot));
-
- MYX(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MYX(f_motLF) + (1.0-f_smoothFactor)* (double) MYX(*inmot));
- MYY(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MYY(f_motLF) + (1.0-f_smoothFactor)* (double) MYY(*inmot));
- MYZ(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MYZ(f_motLF) + (1.0-f_smoothFactor)* (double) MYZ(*inmot));
- MYW(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MYW(f_motLF) + (1.0-f_smoothFactor)* (double) MYW(*inmot));
-
- MZX(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MZX(f_motLF) + (1.0-f_smoothFactor)* (double) MZX(*inmot));
- MZY(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MZY(f_motLF) + (1.0-f_smoothFactor)* (double) MZY(*inmot));
- MZZ(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MZZ(f_motLF) + (1.0-f_smoothFactor)* (double) MZZ(*inmot));
- MZW(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MZW(f_motLF) + (1.0-f_smoothFactor)* (double) MZW(*inmot));
-
- MWX(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MWX(f_motLF) + (1.0-f_smoothFactor)* (double) MWX(*inmot));
- MWY(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MWY(f_motLF) + (1.0-f_smoothFactor)* (double) MWY(*inmot));
- MWZ(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MWZ(f_motLF) + (1.0-f_smoothFactor)* (double) MWZ(*inmot));
- MWW(f_motLF) = (VP_PAR) (f_smoothFactor*(double) MWW(f_motLF) + (1.0-f_smoothFactor)* (double) MWW(*inmot));
- }
- else
- vp_copy_motion_no_id(inmot, &f_motLF); // f_smoothFactor = 0.0
-
- // Only allow LF motion to be compensated. Remove HF motion from
- // the output transformation
- if(!vp_invert_motion(&f_motLF, &f_imotLF))
- return false;
-
- if(!vp_cascade_motion(&f_imotLF, inmot, outmot))
- return false;
- }
- else {
- vp_copy_motion_no_id(inmot, outmot);
- }
-
- return true;
-}
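
smoothMotion() above keeps a low-frequency motion estimate f_motLF as an element-wise exponential moving average of the incoming motion and then cascades its inverse with the input, so the display follows the low-frequency motion while the high-frequency jitter is compensated. The scalar analogy below uses hypothetical per-frame shifts; it is illustrative only and not part of the deleted sources.

#include <cstdio>

int main() {
    const double s = 0.9;       // smoothing factor, analogous to f_smoothFactor
    double lf = 0.0;            // low-frequency estimate, analogous to f_motLF
    const double x[6] = {0.0, 4.0, -3.0, 5.0, -4.0, 6.0};  // hypothetical per-frame shifts

    for (int k = 0; k < 6; ++k) {
        lf = s * lf + (1.0 - s) * x[k];      // IIR low-pass, as in smoothMotion()
        const double hf = x[k] - lf;         // high-frequency residual to cancel
        std::printf("frame %d: input %+5.2f  display follows %+5.2f  compensation %+5.2f\n",
                    k, x[k], lf, -hf);
    }
    return 0;
}
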
-
-bool db_StabilizationSmoother::smoothMotionAdaptive(/*VP_BIMG *bimg,*/int hsize, int vsize, VP_MOTION *inmot, VP_MOTION *outmot)
-{
- VP_MOTION tmpMotion, testMotion;
- VP_PAR p1x, p2x, p3x, p4x;
- VP_PAR p1y, p2y, p3y, p4y;
- double smoothFactor;
- double minSmoothFactor = f_minDampingFactor;
-
-// int hsize = bimg->w;
-// int vsize = bimg->h;
- double border_factor = 0.01;//0.2;
- double border_x = border_factor * hsize;
- double border_y = border_factor * vsize;
-
- VP_MOTION_ID(f_motLF);
- VP_MOTION_ID(f_imotLF);
- VP_MOTION_ID(testMotion);
- VP_MOTION_ID(tmpMotion);
-
- if (f_smoothOn) {
- VP_MOTION identityMotion;
- VP_MOTION_ID(identityMotion); // initialize the motion
- vp_copy_motion(inmot/*in*/, &testMotion/*out*/);
- VP_PAR delta = vp_motion_cornerdiff(&testMotion, &identityMotion, 0, 0,(int)hsize, (int)vsize);
-
- smoothFactor = 0.99 - 0.0015 * delta;
-
- if(smoothFactor < minSmoothFactor)
- smoothFactor = minSmoothFactor;
-
- // Find the amount of motion that must be compensated so that no "border" pixels are seen in the stable video
- for (smoothFactor = smoothFactor; smoothFactor >= minSmoothFactor; smoothFactor -= 0.01) {
- // Compute the smoothed motion
- if(!smoothMotion(inmot, &tmpMotion, smoothFactor))
- break;
-
- // TmpMotion, or Qsi where s is the smoothed display reference and i is the
- // current image, tells us how points in the S co-ordinate system map to
- // points in the I CS. We would like to check whether the four corners of the
- // warped and smoothed display reference lie entirely within the I co-ordinate
- // system. If yes, then the amount of smoothing is sufficient so that NO
- // border pixels are seen at the output. We test for f_smoothFactor terms
- // between 0.9 and 1.0, in steps of 0.01, and between 0.5 and 0.9 in steps of 0.1.
-
- (void) vp_zoom_motion2d(&tmpMotion, &testMotion, 1, hsize, vsize, (double)f_zoom); // needs to return bool
-
- VP_WARP_POINT_2D(0, 0, testMotion, p1x, p1y);
- VP_WARP_POINT_2D(hsize - 1, 0, testMotion, p2x, p2y);
- VP_WARP_POINT_2D(hsize - 1, vsize - 1, testMotion, p3x, p3y);
- VP_WARP_POINT_2D(0, vsize - 1, testMotion, p4x, p4y);
-
- if (!is_point_in_rect((double)p1x,(double)p1y,-border_x,-border_y,(double)(hsize+2.0*border_x),(double)(vsize+2.0*border_y))) {
- continue;
- }
- if (!is_point_in_rect((double)p2x, (double)p2y,-border_x,-border_y,(double)(hsize+2.0*border_x),(double)(vsize+2.0*border_y))) {
- continue;
- }
- if (!is_point_in_rect((double)p3x,(double)p3y,-border_x,-border_y,(double)(hsize+2.0*border_x),(double)(vsize+2.0*border_y))) {
- continue;
- }
- if (!is_point_in_rect((double)p4x, (double)p4y,-border_x,-border_y,(double)(hsize+2.0*border_x),(double)(vsize+2.0*border_y))) {
- continue;
- }
-
- // If we get here, then all the points are in the rectangle.
- // Therefore, break out of this loop
- break;
- }
-
- // if we get here and f_smoothFactor <= fMinDampingFactor, reset the stab reference
- if (smoothFactor < f_minDampingFactor)
- smoothFactor = f_minDampingFactor;
-
- // use the smoothed motion for stabilization
- vp_copy_motion_no_id(&tmpMotion/*in*/, outmot/*out*/);
- }
- else
- {
- vp_copy_motion_no_id(inmot, outmot);
- }
-
- return true;
-}
-
-bool db_StabilizationSmoother::smoothMotion(VP_MOTION *inmot, VP_MOTION *outmot, double smooth_factor)
-{
- f_motLF.insid = inmot->refid;
- f_motLF.refid = inmot->insid;
-
- if(f_smoothOn) {
- if(!f_smoothReset) {
- MXX(f_motLF) = (VP_PAR) (smooth_factor*(double) MXX(f_motLF) + (1.0-smooth_factor)* (double) MXX(*inmot));
- MXY(f_motLF) = (VP_PAR) (smooth_factor*(double) MXY(f_motLF) + (1.0-smooth_factor)* (double) MXY(*inmot));
- MXZ(f_motLF) = (VP_PAR) (smooth_factor*(double) MXZ(f_motLF) + (1.0-smooth_factor)* (double) MXZ(*inmot));
- MXW(f_motLF) = (VP_PAR) (smooth_factor*(double) MXW(f_motLF) + (1.0-smooth_factor)* (double) MXW(*inmot));
-
- MYX(f_motLF) = (VP_PAR) (smooth_factor*(double) MYX(f_motLF) + (1.0-smooth_factor)* (double) MYX(*inmot));
- MYY(f_motLF) = (VP_PAR) (smooth_factor*(double) MYY(f_motLF) + (1.0-smooth_factor)* (double) MYY(*inmot));
- MYZ(f_motLF) = (VP_PAR) (smooth_factor*(double) MYZ(f_motLF) + (1.0-smooth_factor)* (double) MYZ(*inmot));
- MYW(f_motLF) = (VP_PAR) (smooth_factor*(double) MYW(f_motLF) + (1.0-smooth_factor)* (double) MYW(*inmot));
-
- MZX(f_motLF) = (VP_PAR) (smooth_factor*(double) MZX(f_motLF) + (1.0-smooth_factor)* (double) MZX(*inmot));
- MZY(f_motLF) = (VP_PAR) (smooth_factor*(double) MZY(f_motLF) + (1.0-smooth_factor)* (double) MZY(*inmot));
- MZZ(f_motLF) = (VP_PAR) (smooth_factor*(double) MZZ(f_motLF) + (1.0-smooth_factor)* (double) MZZ(*inmot));
- MZW(f_motLF) = (VP_PAR) (smooth_factor*(double) MZW(f_motLF) + (1.0-smooth_factor)* (double) MZW(*inmot));
-
- MWX(f_motLF) = (VP_PAR) (smooth_factor*(double) MWX(f_motLF) + (1.0-smooth_factor)* (double) MWX(*inmot));
- MWY(f_motLF) = (VP_PAR) (smooth_factor*(double) MWY(f_motLF) + (1.0-smooth_factor)* (double) MWY(*inmot));
- MWZ(f_motLF) = (VP_PAR) (smooth_factor*(double) MWZ(f_motLF) + (1.0-smooth_factor)* (double) MWZ(*inmot));
- MWW(f_motLF) = (VP_PAR) (smooth_factor*(double) MWW(f_motLF) + (1.0-smooth_factor)* (double) MWW(*inmot));
- }
- else
- vp_copy_motion_no_id(inmot, &f_motLF); // smooth_factor = 0.0
-
- // Only allow LF motion to be compensated. Remove HF motion from
- // the output transformation
- if(!vp_invert_motion(&f_motLF, &f_imotLF))
- return false;
-
- if(!vp_cascade_motion(&f_imotLF, inmot, outmot))
- return false;
- }
- else {
- vp_copy_motion_no_id(inmot, outmot);
- }
-
- return true;
-}
-
-//! Overloaded smoother function that takes in a user-specified smoothing factor
-bool
-db_StabilizationSmoother::smoothMotion1(VP_MOTION *inmot, VP_MOTION *outmot, VP_MOTION *motLF, VP_MOTION *imotLF, double factor)
-{
-
- if(!f_smoothOn) {
- vp_copy_motion(inmot, outmot);
- return true;
- }
- else {
- if(!f_smoothReset) {
- MXX(*motLF) = (VP_PAR) (factor*(double) MXX(*motLF) + (1.0-factor)* (double) MXX(*inmot));
- MXY(*motLF) = (VP_PAR) (factor*(double) MXY(*motLF) + (1.0-factor)* (double) MXY(*inmot));
- MXZ(*motLF) = (VP_PAR) (factor*(double) MXZ(*motLF) + (1.0-factor)* (double) MXZ(*inmot));
- MXW(*motLF) = (VP_PAR) (factor*(double) MXW(*motLF) + (1.0-factor)* (double) MXW(*inmot));
-
- MYX(*motLF) = (VP_PAR) (factor*(double) MYX(*motLF) + (1.0-factor)* (double) MYX(*inmot));
- MYY(*motLF) = (VP_PAR) (factor*(double) MYY(*motLF) + (1.0-factor)* (double) MYY(*inmot));
- MYZ(*motLF) = (VP_PAR) (factor*(double) MYZ(*motLF) + (1.0-factor)* (double) MYZ(*inmot));
- MYW(*motLF) = (VP_PAR) (factor*(double) MYW(*motLF) + (1.0-factor)* (double) MYW(*inmot));
-
- MZX(*motLF) = (VP_PAR) (factor*(double) MZX(*motLF) + (1.0-factor)* (double) MZX(*inmot));
- MZY(*motLF) = (VP_PAR) (factor*(double) MZY(*motLF) + (1.0-factor)* (double) MZY(*inmot));
- MZZ(*motLF) = (VP_PAR) (factor*(double) MZZ(*motLF) + (1.0-factor)* (double) MZZ(*inmot));
- MZW(*motLF) = (VP_PAR) (factor*(double) MZW(*motLF) + (1.0-factor)* (double) MZW(*inmot));
-
- MWX(*motLF) = (VP_PAR) (factor*(double) MWX(*motLF) + (1.0-factor)* (double) MWX(*inmot));
- MWY(*motLF) = (VP_PAR) (factor*(double) MWY(*motLF) + (1.0-factor)* (double) MWY(*inmot));
- MWZ(*motLF) = (VP_PAR) (factor*(double) MWZ(*motLF) + (1.0-factor)* (double) MWZ(*inmot));
- MWW(*motLF) = (VP_PAR) (factor*(double) MWW(*motLF) + (1.0-factor)* (double) MWW(*inmot));
- }
- else {
- vp_copy_motion(inmot, motLF);
- }
- // Only allow LF motion to be compensated. Remove HF motion from the output transformation
- if(!vp_invert_motion(motLF, imotLF)) {
-#if DEBUG_PRINT
- printfOS("Invert failed \n");
-#endif
- return false;
- }
- if(!vp_cascade_motion(imotLF, inmot, outmot)) {
-#if DEBUG_PRINT
- printfOS("cascade failed \n");
-#endif
- return false;
- }
- }
- return true;
-}
-
-
-
-
-bool db_StabilizationSmoother::is_point_in_rect(double px, double py, double rx, double ry, double w, double h)
-{
- if (px < rx)
- return(false);
- if (px >= rx + w)
- return(false);
- if (py < ry)
- return(false);
- if (py >= ry + h)
- return(false);
-
- return(true);
-}
-
-
-
-static bool vpmotion_add(VP_MOTION *in1, VP_MOTION *in2, VP_MOTION *out)
-{
- int i;
- if(in1 == NULL || in2 == NULL || out == NULL)
- return false;
-
- for(i = 0; i < VP_MAX_MOTION_PAR; i++)
- out->par[i] = in1->par[i] + in2->par[i];
-
- return true;
-}
-
-static bool vpmotion_multiply(VP_MOTION *in1, double factor, VP_MOTION *out)
-{
- int i;
- if(in1 == NULL || out == NULL)
- return false;
-
- for(i = 0; i < VP_MAX_MOTION_PAR; i++)
- out->par[i] = in1->par[i] * factor;
-
- return true;
-}
-
diff --git a/jni_mosaic/feature_stab/src/dbreg/dbstabsmooth.h b/jni_mosaic/feature_stab/src/dbreg/dbstabsmooth.h
deleted file mode 100644
index f03546ef6..000000000
--- a/jni_mosaic/feature_stab/src/dbreg/dbstabsmooth.h
+++ /dev/null
@@ -1,157 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-
-#ifdef _WIN32
-#ifdef DBREG_EXPORTS
-#define DBREG_API __declspec(dllexport)
-#else
-#define DBREG_API __declspec(dllimport)
-#endif
-#else
-#define DBREG_API
-#endif
-
-extern "C" {
-#include "vp_motionmodel.h"
-}
-
-#define MOTION_ARRAY 5
-
-
-/*!
- * Performs smoothing on the motion estimate from feature_stab.
- */
-class DBREG_API db_StabilizationSmoother
-{
-public:
- db_StabilizationSmoother();
- ~db_StabilizationSmoother();
-
- /*!
- * Initialize parameters for stab-smoother.
- */
- void Init();
-
- //! Smoothing type
- typedef enum {
- SimpleSmooth = 0, //!< simple smooth
- AdaptSmooth = 1, //!< adaptive smooth
- PanSmooth = 2 //!< pan motion smooth
- } SmoothType;
-
- /*!
- * Smooth-motion performs a weighted average between the current affine and
- * motLF. The affine is modified only for display purposes: it removes the
- * high-frequency motion and keeps the low-frequency motion for display.
- * IIR implementation.
- * \param inmot input motion parameters
- * \param outmot smoothed output motion parameters
- */
- bool smoothMotion(VP_MOTION *inmot, VP_MOTION *outmot);
-
- /*!
- * The adaptive smoothing version of the above fixed smoothing function.
- * \param hsize width of the image being aligned
- * \param vsize height of the image being aligned
- * \param inmot input motion parameters
- * \param outmot smoothed output motion parameters
- */
- bool smoothMotionAdaptive(/*VP_BIMG *bimg,*/int hsize, int vsize, VP_MOTION *inmot, VP_MOTION *outmot);
- bool smoothPanMotion_1(VP_MOTION *inmot, VP_MOTION *outmot);
- bool smoothPanMotion_2(VP_MOTION *inmot, VP_MOTION *outmot);
-
- /*!
- * Set the smoothing factor for the stab-smoother.
- * \param factor the factor value to set
- */
- inline void setSmoothingFactor(float factor) { f_smoothFactor = factor; }
-
- /*!
- * Reset smoothing
- */
- inline void resetSmoothing(bool flag) { f_smoothReset = flag; }
- /*!
- * Set the zoom factor value.
- * \param zoom the value to set to
- */
- inline void setZoomFactor(float zoom) { f_zoom = zoom; }
- /*!
- * Set the minimum damping factor value.
- * \param factor the value to set to
- */
- inline void setminDampingFactor(float factor) { f_minDampingFactor = factor; }
-
- /*!
- * Returns the current smoothing factor.
- */
- inline float getSmoothingFactor(void) { return f_smoothFactor; }
- /*!
- * Returns the current zoom factor.
- */
- inline float getZoomFactor(void) { return f_zoom; }
- /*!
- * Returns the current minimum damping factor.
- */
- inline float getminDampingFactor(void) { return f_minDampingFactor; }
- /*!
- * Returns the current state of the smoothing reset flag.
- */
- inline bool getSmoothReset(void) { return f_smoothReset; }
- /*!
- * Returns the current low frequency motion parameters.
- */
- inline VP_MOTION getMotLF(void) { return f_motLF; }
- /*!
- * Returns the inverse of the current low frequency motion parameters.
- */
- inline VP_MOTION getImotLF(void) { return f_imotLF; }
- /*!
- * Set the dimensions of the alignment image.
- * \param hsize width of the image
- * \param vsize height of the image
- */
- inline void setSize(int hsize, int vsize) { f_hsize = hsize; f_vsize = vsize; }
-
-protected:
-
- bool smoothMotion(VP_MOTION *inmot, VP_MOTION *outmot, double smooth_factor);
- bool smoothMotion1(VP_MOTION *inmot, VP_MOTION *outmot, VP_MOTION *motLF, VP_MOTION *imotLF, double smooth_factor);
- void iterativeSmooth(VP_MOTION *input, VP_MOTION *output, double border_factor);
- bool is_point_in_rect(double px, double py, double rx, double ry, double w, double h);
-
-
-private:
- int f_hsize;
- int f_vsize;
- bool f_smoothOn;
- bool f_smoothReset;
- float f_smoothFactor;
- float f_minDampingFactor;
- float f_zoom;
- VP_MOTION f_motLF;
- VP_MOTION f_imotLF;
- VP_MOTION f_hist_mot[MOTION_ARRAY];
- VP_MOTION f_hist_mot_speed[MOTION_ARRAY-1];
- VP_MOTION f_hist_diff_mot[MOTION_ARRAY-1];
- VP_MOTION f_disp_mot;
- VP_MOTION f_src_mot;
- VP_MOTION f_diff_avg;
-
-};
-
diff --git a/jni_mosaic/feature_stab/src/dbreg/targetver.h b/jni_mosaic/feature_stab/src/dbreg/targetver.h
deleted file mode 100644
index 3ca3e8792..000000000
--- a/jni_mosaic/feature_stab/src/dbreg/targetver.h
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-// The following macros define the minimum required platform. The minimum required platform
-// is the earliest version of Windows, Internet Explorer etc. that has the necessary features to run
-// your application. The macros work by enabling all features available on platform versions up to and
-// including the version specified.
-
-// Modify the following defines if you have to target a platform prior to the ones specified below.
-// Refer to MSDN for the latest info on corresponding values for different platforms.
-#ifndef WINVER // Specifies that the minimum required platform is Windows Vista.
-#define WINVER 0x0600 // Change this to the appropriate value to target other versions of Windows.
-#endif
-
-#ifndef _WIN32_WINNT // Specifies that the minimum required platform is Windows Vista.
-#define _WIN32_WINNT 0x0600 // Change this to the appropriate value to target other versions of Windows.
-#endif
-
-#ifndef _WIN32_WINDOWS // Specifies that the minimum required platform is Windows 98.
-#define _WIN32_WINDOWS 0x0410 // Change this to the appropriate value to target Windows Me or later.
-#endif
-
-#ifndef _WIN32_IE // Specifies that the minimum required platform is Internet Explorer 7.0.
-#define _WIN32_IE 0x0700 // Change this to the appropriate value to target other versions of IE.
-#endif
diff --git a/jni_mosaic/feature_stab/src/dbreg/vp_motionmodel.c b/jni_mosaic/feature_stab/src/dbreg/vp_motionmodel.c
deleted file mode 100644
index 1f6af15bd..000000000
--- a/jni_mosaic/feature_stab/src/dbreg/vp_motionmodel.c
+++ /dev/null
@@ -1,377 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
-#sourcefile vpmotion/vp_motionmodel.c
-#category motion-model
-*
-* Copyright 1998 Sarnoff Corporation
-* All Rights Reserved
-*
-* Modification History
-* Date: 02/14/98
-* Author: supuns
-* Shop Order: 17xxx
-* @(#) $Id: vp_motionmodel.c,v 1.4 2011/06/17 14:04:33 mbansal Exp $
-*/
-
-/*
-* ===================================================================
-* Include Files
-*/
-
-#include <string.h> /* memmove */
-#include <math.h>
-#include "vp_motionmodel.h"
-
-/* Static Functions */
-static
-double Det3(double m[3][3])
-{
- double result;
-
- result =
- m[0][0]*m[1][1]*m[2][2] + m[0][1]*m[1][2]*m[2][0] +
- m[0][2]*m[1][0]*m[2][1] - m[0][2]*m[1][1]*m[2][0] -
- m[0][0]*m[1][2]*m[2][1] - m[0][1]*m[1][0]*m[2][2];
-
- return(result);
-}
-
-typedef double MATRIX[4][4];
-
-static
-double Det4(MATRIX m)
-{
- /* ==> This is a poor implementation of determinant.
- Writing the formula out in closed form is unnecessarily complicated
- and mistakes are easy to make. */
- double result;
-
- result=
- m[0][3] *m[1][2] *m[2][1] *m[3][0] - m[0][2] *m[1][3] *m[2][1] *m[3][0] - m[0][3] *m[1][1] *m[2][2] *m[3][0] +
- m[0][1] *m[1][3] *m[2][2] *m[3][0] + m[0][2] *m[1][1] *m[2][3] *m[3][0] - m[0][1] *m[1][2] *m[2][3] *m[3][0] - m[0][3] *m[1][2] *m[2][0] *m[3][1] +
- m[0][2] *m[1][3] *m[2][0] *m[3][1] + m[0][3] *m[1][0] *m[2][2] *m[3][1] - m[0][0] *m[1][3] *m[2][2] *m[3][1] - m[0][2] *m[1][0] *m[2][3] *m[3][1] +
- m[0][0] *m[1][2] *m[2][3] *m[3][1] + m[0][3] *m[1][1] *m[2][0] *m[3][2] - m[0][1] *m[1][3] *m[2][0] *m[3][2] - m[0][3] *m[1][0] *m[2][1] *m[3][2] +
- m[0][0] *m[1][3] *m[2][1] *m[3][2] + m[0][1] *m[1][0] *m[2][3] *m[3][2] - m[0][0] *m[1][1] *m[2][3] *m[3][2] - m[0][2] *m[1][1] *m[2][0] *m[3][3] +
- m[0][1] *m[1][2] *m[2][0] *m[3][3] + m[0][2] *m[1][0] *m[2][1] *m[3][3] - m[0][0] *m[1][2] *m[2][1] *m[3][3] - m[0][1] *m[1][0] *m[2][2] *m[3][3] +
- m[0][0] *m[1][1] *m[2][2] *m[3][3];
- /*
- m[0][0]*m[1][1]*m[2][2]*m[3][3]-m[0][1]*m[1][0]*m[2][2]*m[3][3]+
- m[0][1]*m[1][2]*m[2][0]*m[3][3]-m[0][2]*m[1][1]*m[2][0]*m[3][3]+
- m[0][2]*m[1][0]*m[2][1]*m[3][3]-m[0][0]*m[1][2]*m[2][1]*m[3][3]+
- m[0][0]*m[1][2]*m[2][3]*m[3][1]-m[0][2]*m[1][0]*m[2][3]*m[3][1]+
- m[0][2]*m[1][3]*m[2][0]*m[3][1]-m[0][3]*m[1][2]*m[2][0]*m[3][1]+
- m[0][3]*m[1][0]*m[2][2]*m[3][1]-m[0][0]*m[1][3]*m[2][2]*m[3][1]+
- m[0][0]*m[1][3]*m[2][1]*m[3][2]-m[0][3]*m[1][0]*m[2][3]*m[3][2]+
- m[0][1]*m[1][0]*m[2][3]*m[3][2]-m[0][0]*m[1][1]*m[2][0]*m[3][2]+
- m[0][3]*m[1][1]*m[2][0]*m[3][2]-m[0][1]*m[1][3]*m[2][1]*m[3][2]+
- m[0][1]*m[1][3]*m[2][2]*m[3][0]-m[0][3]*m[1][1]*m[2][2]*m[3][0]+
- m[0][2]*m[1][1]*m[2][3]*m[3][0]-m[0][1]*m[1][2]*m[2][3]*m[3][0]+
- m[0][3]*m[1][2]*m[2][1]*m[3][0]-m[0][2]*m[1][3]*m[2][1]*m[3][0];
- */
- return(result);
-}
-
-static
-int inv4Mat(const VP_MOTION* in, VP_MOTION* out)
-{
- /* ==> This is a poor implementation of inversion. The determinant
- method is O(N^4), i.e. unnecessarily slow, and not numerically accurate.
- The real complexity of inversion is O(N^3), and is best done using
- LU decomposition. */
-
- MATRIX inmat,outmat;
- int i, j, k, l, m, n,ntemp;
- double mat[3][3], indet, temp;
-
- /* check for NULL structure pointers */
- if (((VP_MOTION *) NULL == in) || ((VP_MOTION *) NULL == out)) {
- return 1;
- }
-
- for(k=0,i=0;i<4;i++)
- for(j=0;j<4;j++,k++)
- inmat[i][j]=(double)in->par[k];
-
- indet = Det4(inmat);
- if (indet==0) return(-1);
-
- for (i=0;i<4;i++) {
- for (j=0;j<4;j++) {
- m = 0;
- for (k=0;k<4;k++) {
- if (i != k) {
- n = 0;
- for (l=0;l<4;l++)
- if (j != l) {
- mat[m][n] = inmat[k][l];
- n++;
- }
- m++;
- }
- }
-
- temp = -1.;
- ntemp = (i +j ) %2;
- if( ntemp == 0) temp = 1.;
-
- outmat[j][i] = temp * Det3(mat)/indet;
- }
- }
-
- for(k=0,i=0;i<4;i++)
- for(j=0;j<4;j++,k++)
- out->par[k]=(VP_PAR)outmat[i][j]; /*lint !e771*/
-
- return(0);
-}
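
As a point of comparison for the note above, a minimal sketch of the Gauss-Jordan route (with partial pivoting) that achieves the O(N^3) behaviour the comment asks for; the standalone signature, the plain double[4][4] interface, and the name inv4Mat_gauss are illustrative assumptions, not part of the original source (fabs comes from the already-included <math.h>):

    static int inv4Mat_gauss(const double a_in[4][4], double out[4][4])
    {
        double a[4][8];
        int i, j, k;

        /* Build the augmented system [A | I]. */
        for (i = 0; i < 4; i++) {
            for (j = 0; j < 4; j++) {
                a[i][j] = a_in[i][j];
                a[i][j + 4] = (i == j) ? 1.0 : 0.0;
            }
        }

        for (i = 0; i < 4; i++) {
            /* Partial pivoting: largest magnitude entry in column i, rows i..3. */
            int pivot = i;
            for (k = i + 1; k < 4; k++)
                if (fabs(a[k][i]) > fabs(a[pivot][i])) pivot = k;
            if (fabs(a[pivot][i]) == 0.0) return -1;   /* singular */
            if (pivot != i)
                for (j = 0; j < 8; j++) {
                    double t = a[i][j]; a[i][j] = a[pivot][j]; a[pivot][j] = t;
                }

            /* Normalize the pivot row, then clear column i in every other row. */
            double inv_piv = 1.0 / a[i][i];
            for (j = 0; j < 8; j++) a[i][j] *= inv_piv;
            for (k = 0; k < 4; k++) {
                if (k == i) continue;
                double f = a[k][i];
                for (j = 0; j < 8; j++) a[k][j] -= f * a[i][j];
            }
        }

        /* The right half now holds A^-1. */
        for (i = 0; i < 4; i++)
            for (j = 0; j < 4; j++)
                out[i][j] = a[i][j + 4];
        return 0;
    }
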
-
-/*
-* ===================================================================
-* Public Functions
-#htmlstart
-*/
-
-/*
- * ===================================================================
-#fn vp_invert_motion
-#ft invert a motion
-#fd DEFINITION
- Bool
- vp_invert_motion(const VP_MOTION* in,VP_MOTION* out)
-#fd PURPOSE
- This inverts the motion given in 'in'.
- All motion models up to VP_MOTION_SEMI_PROJ_3D are supported.
- It is assumed that all 16 parameters are properly
- initialized even if you are not using all of them. You can
- use the VP_KEEP_ macros defined in vp_motionmodel.h to set
- the uninitialized parameters. This uses a 4x4 matrix inversion
- function internally.
- It is SAFE to pass the same pointer as both the 'in' and 'out'
- parameters.
-#fd INPUTS
- in - input motion
-#fd OUTPUTS
- out - output inverted motion. If the matrix is singular, 'out' is left uninitialized.
- If MWW(in) is non-zero, the result is also normalized.
-#fd RETURNS
- FALSE - matrix is singular or motion model not supported
- TRUE - otherwise
-#fd SIDE EFFECTS
- None
-#endfn
-*/
-
-int vp_invert_motion(const VP_MOTION* in,VP_MOTION* out)
-{
- int refid;
-
- /* check for non-empty structures structure */
- if (((VP_MOTION *) NULL == in) || ((VP_MOTION *) NULL == out)) {
- return FALSE;
- }
-
- if (in->type>VP_MOTION_SEMI_PROJ_3D) {
- return FALSE;
- }
-
- if (inv4Mat(in,out)<0)
- return FALSE;
-
- /*VP_NORMALIZE(*out);*/
- out->type = in->type;
- refid=in->refid;
- out->refid=in->insid;
- out->insid=refid;
- return TRUE;
-}
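
A minimal usage sketch (the calling context and the values are assumed for illustration); as the documentation above notes, passing the same pointer for 'in' and 'out' is safe:

    VP_MOTION m;
    VP_MOTION_ID(m);                 /* identity, type VP_MOTION_TRANSLATION */
    MXW(m) = 10.0;                   /* translate x by +10 */
    MYW(m) = -4.0;                   /* translate y by -4  */
    VP_SET_REFID(m, 0);
    VP_SET_INSID(m, 1);

    if (!vp_invert_motion(&m, &m)) {
        /* singular matrix or unsupported motion type */
    }
    /* MXW(m) is now -10.0, MYW(m) is +4.0, and refid/insid are swapped */
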
-
-/*
-* ===================================================================
-#fn vp_cascade_motion
-#ft Cascade two motion transforms
-#fd DEFINITION
- Bool
- vp_cascade_motion(const VP_MOTION* InAB,const VP_MOTION* InBC,VP_MOTION* OutAC)
-#fd PURPOSE
- Given motion transforms A->B and B->C, this function
- generates a new motion that describes the transformation
- from A->C.
- More specifically, OutAC = InBC * InAB.
- This function works correctly even if InAB, InBC and OutAC are the same pointer.
-#fd INPUTS
- InAB - First Motion Transform
- InBC - Second Motion Transform
-#fd OUTPUTS
- OutAC - Cascaded Motion
-#fd RETURNS
- FALSE - motion model not supported
- TRUE - otherwise
-#fd SIDE EFFECTS
- None
-#endfn
-*/
-
-int vp_cascade_motion(const VP_MOTION* InA, const VP_MOTION* InB,VP_MOTION* Out)
-{
- /* ==> This is a poor implementation of matrix multiplication.
- Writing the formula out in closed form is unnecessarily complicated
- and mistakes are easy to make. */
- VP_PAR mxx,mxy,mxz,mxw;
- VP_PAR myx,myy,myz,myw;
- VP_PAR mzx,mzy,mzz,mzw;
- VP_PAR mwx,mwy,mwz,mww;
-
- /* check for NULL structure pointers */
- if (((VP_MOTION *) NULL == InA) || ((VP_MOTION *) NULL == InB) ||
- ((VP_MOTION *) NULL == Out)) {
- return FALSE;
- }
-
- if (InA->type>VP_MOTION_PROJ_3D) {
- return FALSE;
- }
-
- if (InB->type>VP_MOTION_PROJ_3D) {
- return FALSE;
- }
-
- mxx = MXX(*InB)*MXX(*InA)+MXY(*InB)*MYX(*InA)+MXZ(*InB)*MZX(*InA)+MXW(*InB)*MWX(*InA);
- mxy = MXX(*InB)*MXY(*InA)+MXY(*InB)*MYY(*InA)+MXZ(*InB)*MZY(*InA)+MXW(*InB)*MWY(*InA);
- mxz = MXX(*InB)*MXZ(*InA)+MXY(*InB)*MYZ(*InA)+MXZ(*InB)*MZZ(*InA)+MXW(*InB)*MWZ(*InA);
- mxw = MXX(*InB)*MXW(*InA)+MXY(*InB)*MYW(*InA)+MXZ(*InB)*MZW(*InA)+MXW(*InB)*MWW(*InA);
- myx = MYX(*InB)*MXX(*InA)+MYY(*InB)*MYX(*InA)+MYZ(*InB)*MZX(*InA)+MYW(*InB)*MWX(*InA);
- myy = MYX(*InB)*MXY(*InA)+MYY(*InB)*MYY(*InA)+MYZ(*InB)*MZY(*InA)+MYW(*InB)*MWY(*InA);
- myz = MYX(*InB)*MXZ(*InA)+MYY(*InB)*MYZ(*InA)+MYZ(*InB)*MZZ(*InA)+MYW(*InB)*MWZ(*InA);
- myw = MYX(*InB)*MXW(*InA)+MYY(*InB)*MYW(*InA)+MYZ(*InB)*MZW(*InA)+MYW(*InB)*MWW(*InA);
- mzx = MZX(*InB)*MXX(*InA)+MZY(*InB)*MYX(*InA)+MZZ(*InB)*MZX(*InA)+MZW(*InB)*MWX(*InA);
- mzy = MZX(*InB)*MXY(*InA)+MZY(*InB)*MYY(*InA)+MZZ(*InB)*MZY(*InA)+MZW(*InB)*MWY(*InA);
- mzz = MZX(*InB)*MXZ(*InA)+MZY(*InB)*MYZ(*InA)+MZZ(*InB)*MZZ(*InA)+MZW(*InB)*MWZ(*InA);
- mzw = MZX(*InB)*MXW(*InA)+MZY(*InB)*MYW(*InA)+MZZ(*InB)*MZW(*InA)+MZW(*InB)*MWW(*InA);
- mwx = MWX(*InB)*MXX(*InA)+MWY(*InB)*MYX(*InA)+MWZ(*InB)*MZX(*InA)+MWW(*InB)*MWX(*InA);
- mwy = MWX(*InB)*MXY(*InA)+MWY(*InB)*MYY(*InA)+MWZ(*InB)*MZY(*InA)+MWW(*InB)*MWY(*InA);
- mwz = MWX(*InB)*MXZ(*InA)+MWY(*InB)*MYZ(*InA)+MWZ(*InB)*MZZ(*InA)+MWW(*InB)*MWZ(*InA);
- mww = MWX(*InB)*MXW(*InA)+MWY(*InB)*MYW(*InA)+MWZ(*InB)*MZW(*InA)+MWW(*InB)*MWW(*InA);
-
- MXX(*Out)=mxx; MXY(*Out)=mxy; MXZ(*Out)=mxz; MXW(*Out)=mxw;
- MYX(*Out)=myx; MYY(*Out)=myy; MYZ(*Out)=myz; MYW(*Out)=myw;
- MZX(*Out)=mzx; MZY(*Out)=mzy; MZZ(*Out)=mzz; MZW(*Out)=mzw;
- MWX(*Out)=mwx; MWY(*Out)=mwy; MWZ(*Out)=mwz; MWW(*Out)=mww;
- /* VP_NORMALIZE(*Out); */
- Out->type= (InA->type > InB->type) ? InA->type : InB->type;
- Out->refid=InA->refid;
- Out->insid=InB->insid;
-
- return TRUE;
-}
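
A minimal usage sketch (frame ids and offsets are assumed for illustration): chain a motion A->B with a motion B->C, then warp a point through the result:

    VP_MOTION ab, bc, ac;
    double x, y;

    VP_MOTION_ID(ab);  MXW(ab) = 5.0;   /* A->B: shift x by 5 */
    VP_SET_REFID(ab, 0); VP_SET_INSID(ab, 1);

    VP_MOTION_ID(bc);  MYW(bc) = 2.0;   /* B->C: shift y by 2 */
    VP_SET_REFID(bc, 1); VP_SET_INSID(bc, 2);

    if (vp_cascade_motion(&ab, &bc, &ac)) {
        VP_WARP_POINT_2D(0, 0, ac, x, y);   /* x == 5.0, y == 2.0 */
    }
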
-
-/*
-* ===================================================================
-#fn vp_copy_motion
-#ft Copies the source motion to the destination motion.
-#fd DEFINITION
- void
- vp_copy_motion (const VP_MOTION *src, VP_MOTION *dst)
-#fd PURPOSE
- Copies the source motion to the destination motion.
- It is OK if src == dst.
- NOTE THAT THE SOURCE IS THE FIRST ARGUMENT.
- This is different from some of the other VP
- copy functions.
-#fd INPUTS
- src is the source motion
- dst is the destination motion
-#fd RETURNS
- void
-#endfn
-*/
-void vp_copy_motion (const VP_MOTION *src, VP_MOTION *dst)
-{
- /* Use memmove rather than memcpy because it handles overlapping memory
- OK. */
- memmove(dst, src, sizeof(VP_MOTION));
- return;
-} /* vp_copy_motion() */
-
-#define VP_SQR(x) ( (x)*(x) )
-double vp_motion_cornerdiff(const VP_MOTION *mot_a, const VP_MOTION *mot_b,
- int xo, int yo, int w, int h)
-{
- double ax1, ay1, ax2, ay2, ax3, ay3, ax4, ay4;
- double bx1, by1, bx2, by2, bx3, by3, bx4, by4;
- double err;
-
- /*lint -e639 -e632 -e633 */
- VP_WARP_POINT_2D(xo, yo, *mot_a, ax1, ay1);
- VP_WARP_POINT_2D(xo+w-1, yo, *mot_a, ax2, ay2);
- VP_WARP_POINT_2D(xo+w-1, yo+h-1, *mot_a, ax3, ay3);
- VP_WARP_POINT_2D(xo, yo+h-1, *mot_a, ax4, ay4);
- VP_WARP_POINT_2D(xo, yo, *mot_b, bx1, by1);
- VP_WARP_POINT_2D(xo+w-1, yo, *mot_b, bx2, by2);
- VP_WARP_POINT_2D(xo+w-1, yo+h-1, *mot_b, bx3, by3);
- VP_WARP_POINT_2D(xo, yo+h-1, *mot_b, bx4, by4);
- /*lint +e639 +e632 +e633 */
-
- err = 0;
- err += (VP_SQR(ax1 - bx1) + VP_SQR(ay1 - by1));
- err += (VP_SQR(ax2 - bx2) + VP_SQR(ay2 - by2));
- err += (VP_SQR(ax3 - bx3) + VP_SQR(ay3 - by3));
- err += (VP_SQR(ax4 - bx4) + VP_SQR(ay4 - by4));
-
- return(sqrt(err));
-}
-
-int vp_zoom_motion2d(VP_MOTION* in, VP_MOTION* out,
- int n, int w, int h, double zoom)
-{
- int ii;
- VP_PAR inv_zoom;
- VP_PAR cx, cy;
- VP_MOTION R2r,R2f;
- VP_MOTION *res;
-
- /* check for NULL structure pointers */
- if (((VP_MOTION *) NULL == in)||(zoom <= 0.0)||(w <= 0)||(h <= 0)) {
- return FALSE;
- }
-
- /* ==> Not sure why the special case of out=NULL is necessary. Why couldn't
- the caller just pass the same pointer for both in and out? */
- res = ((VP_MOTION *) NULL == out)?in:out;
-
- cx = (VP_PAR) (w/2.0);
- cy = (VP_PAR) (h/2.0);
-
- VP_MOTION_ID(R2r);
- inv_zoom = (VP_PAR)(1.0/zoom);
- MXX(R2r) = inv_zoom;
- MYY(R2r) = inv_zoom;
- MXW(R2r)=cx*(((VP_PAR)1.0) - inv_zoom);
- MYW(R2r)=cy*(((VP_PAR)1.0) - inv_zoom);
-
- VP_KEEP_AFFINE_2D(R2r);
-
- for(ii=0;ii<n;ii++) {
- (void) vp_cascade_motion(&R2r,in+ii,&R2f);
- res[ii]=R2f;
- }
-
- return TRUE;
-} /* vp_zoom_motion2d() */
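
A minimal usage sketch (array length, image size and zoom value are assumed): with out == NULL the motions are rewritten in place, as the special case above allows:

    VP_MOTION track[16];
    /* ... track[0..15] filled in by the registration code ... */
    if (!vp_zoom_motion2d(track, (VP_MOTION *)NULL, 16, 640, 480, 2.0)) {
        /* NULL input, or non-positive zoom/width/height */
    }
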
-
-/* =================================================================== */
-/* end vp_motionmodel.c */
diff --git a/jni_mosaic/feature_stab/src/dbreg/vp_motionmodel.h b/jni_mosaic/feature_stab/src/dbreg/vp_motionmodel.h
deleted file mode 100644
index a63ac0010..000000000
--- a/jni_mosaic/feature_stab/src/dbreg/vp_motionmodel.h
+++ /dev/null
@@ -1,282 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
-#sourcefile vp_motionmodel.h
-#category warp
-#description general motion model for translation/affine/projective
-#title motion-model
-#parentlink hindex.html
-*
-* Copyright 1998 Sarnoff Corporation
-* All Rights Reserved
-*
-* Modification History
-* Date: 02/13/98
-* Author: supuns
-* Shop Order: 15491 001
-* @(#) $Id: vp_motionmodel.h,v 1.4 2011/06/17 14:04:33 mbansal Exp $
-*/
-
-#ifndef VP_MOTIONMODEL_H
-#define VP_MOTIONMODEL_H
-#include <stdio.h>
-
-#define FALSE 0
-#define TRUE 1
-
-#if 0 /* Moved mottomat.c and mattomot_d.c from vpmotion.h to vpcompat.h
- in order to remove otherwise unnecessary dependency of vpmotion,
- vpwarp, and newvpio on vpmath */
-#ifndef VPMATH_H
-#include "vpmath.h"
-#endif
-#endif
-
-#if 0
-#ifndef VP_WARP_H
-#include "vp_warp.h"
-#endif
-#endif
-/*
-
-#htmlstart
-# ===================================================================
-#h 1 Introduction
-
- This defines a motion model that can describe translation,
- affine, projective, projective 3d, and 3d view transforms.
-
- The main structure VP_MOTION contains a 16-parameter array (that
- can be considered as the elements of a 4x4 matrix) and a type field
- which can be one of VP_MOTION_NONE, VP_MOTION_TRANSLATION,
- VP_MOTION_AFFINE, VP_MOTION_PROJECTIVE, VP_MOTION_PROJ_3D or
- VP_MOTION_VIEW_3D. (These are defined using enums with gaps of 10
- so that subsets of these motions that are still consistent can be
- added in between. Motion models that are inconsistent with this set
- should be added at the end so the routines can handle them
- independently.)
-
- The transformations VP_MOTION_NONE, VP_MOTION_TRANSLATION,
- VP_MOTION_AFFINE, VP_MOTION_PROJECTIVE, VP_MOTION_PROJ_3D and
- VP_MOTION_SEMI_PROJ_3D map a point P={x,y,z,w} to a new point
- P'={x',y',z',w'} using a motion model M such that P' = M.par * P,
- where M.par is interpreted as the elements of a 4x4 matrix ordered row
- by row. For all models except VP_MOTION_SEMI_PROJ_3D the mapping
- is interpreted as producing a 3d point P"={x",y",z"} obtained
- from the normalization {x'/w',y'/w',z'/w'}. For VP_MOTION_SEMI_PROJ_3D
- the point P"={x",y",z"} is obtained from the normalization
- {x'/w',y'/w',z'}. All of these motion models have the property that they
- can be inverted using 4x4 matrices. Except for VP_MOTION_SEMI_PROJ_3D, all
- other types can also be cascaded using 4x4 matrices.
-
- Specific macros and functions are provided to handle the 2d instances
- of these transforms. As the parameter interpretations can change when adding
- new motion models, it is HIGHLY RECOMMENDED that you use the macros MXX, MXY,
- etc. to interpret each motion component.
-#pre
-*/
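
To make the mapping concrete (an illustrative example using only the macros defined below): a 2d affine motion stores

    [ MXX MXY  0  MXW ]
    [ MYX MYY  0  MYW ]
    [  0   0   1   0  ]
    [  0   0   0   1  ]

so a point P={x,y,0,1} maps to P'={MXX*x+MXY*y+MXW, MYX*x+MYY*y+MYW, 0, 1} with w'=1, and no normalization is needed. For a 2d projective motion the last row becomes [MWX MWY 0 MWW], and the image-plane result is {x'/w', y'/w'} with w'=MWX*x+MWY*y+MWW, which is exactly the division performed by VP_WARP_POINT_2D further down in this header.
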
-
-/*
-#endpre
-# ===================================================================
-#h 1 Typedef and Struct Declarations
-#pre
-*/
-
-#define VP_MAX_MOTION_PAR 16
-
-typedef double VP_PAR;
-typedef VP_PAR VP_TRS[VP_MAX_MOTION_PAR];
-
-/* Do not add any motion models before VP_MOTION_PROJECTIVE */
-/* The order is assumed in vp functions */
-enum VP_MOTION_MODEL {
- VP_MOTION_NONE=0,
- VP_MOTION_TRANSLATION=10,
- VP_MOTION_SCALE=11,
- VP_MOTION_ROTATE=12,
- VP_MOTION_X_SHEAR=13,
- VP_MOTION_Y_SHEAR=14,
- VP_MOTION_SIMILARITY=15,
- VP_MOTION_AFFINE=20,
- VP_MOTION_PROJECTIVE=30,
- VP_MOTION_PROJ_3D=40,
- VP_MOTION_SEMI_PROJ_3D=80,
- VP_SIMILARITY=100,
- VP_VFE_AFFINE=120
-};
-
-#define VP_REFID -1 /* Default ID used for reference frame */
-
-typedef struct {
- VP_TRS par; /* Contains the motion parameters.
- For the standard motion types these are
- represented as 16 numbers that refer
- to a 4x4 matrix */
- enum VP_MOTION_MODEL type;
- int refid; /* Reference frame ( takes a point in the refid frame
- and maps it through par to get a point in the insid
- frame ) */
- int insid; /* Inspection frame */
-} VP_MOTION;
-
-//typedef VP_LIST VP_MOTION_LIST;
-/*
-#endpre
-# ===================================================================
-#h 1 Constant Declarations
-*/
-
-/* Macros related to the 4x4 matrix parameters */
-#define MXX(m) (m).par[0]
-#define MXY(m) (m).par[1]
-#define MXZ(m) (m).par[2]
-#define MXW(m) (m).par[3]
-#define MYX(m) (m).par[4]
-#define MYY(m) (m).par[5]
-#define MYZ(m) (m).par[6]
-#define MYW(m) (m).par[7]
-#define MZX(m) (m).par[8]
-#define MZY(m) (m).par[9]
-#define MZZ(m) (m).par[10]
-#define MZW(m) (m).par[11]
-#define MWX(m) (m).par[12]
-#define MWY(m) (m).par[13]
-#define MWZ(m) (m).par[14]
-#define MWW(m) (m).par[15]
-
-/* The do {...} while (0) technique creates a statement that can be used legally
- in an if-else statement. See "Swallowing the semicolon",
- http://gcc.gnu.org/onlinedocs/gcc-2.95.3/cpp_1.html#SEC23 */
-/* Initialize the Motion to be Identity */
-#define VP_MOTION_ID(m) do {\
- MXX(m)=MYY(m)=MZZ(m)=MWW(m)=(VP_PAR)1.0; \
- MXY(m)=MXZ(m)=MXW(m)=(VP_PAR)0.0; \
- MYX(m)=MYZ(m)=MYW(m)=(VP_PAR)0.0; \
- MZX(m)=MZY(m)=MZW(m)=(VP_PAR)0.0; \
- MWX(m)=MWY(m)=MWZ(m)=(VP_PAR)0.0; \
-(m).type = VP_MOTION_TRANSLATION; } while (0)
-
-/* Initialize without altering the translation components */
-#define VP_KEEP_TRANSLATION_3D(m) do {\
- MXX(m)=MYY(m)=MZZ(m)=MWW(m)=(VP_PAR)1.0; \
- MXY(m)=MXZ(m)=(VP_PAR)0.0; \
- MYX(m)=MYZ(m)=(VP_PAR)0.0; \
- MZX(m)=MZY(m)=(VP_PAR)0.0; \
- MWX(m)=MWY(m)=MWZ(m)=(VP_PAR)0.0; \
- (m).type = VP_MOTION_PROJ_3D; } while (0)
-
-/* Initialize without altering the 2d translation components */
-#define VP_KEEP_TRANSLATION_2D(m) do {\
- VP_KEEP_TRANSLATION_3D(m); MZW(m)=(VP_PAR)0.0; (m).type= VP_MOTION_TRANSLATION;} while (0)
-
-/* Initialize without altering the affine & translation components */
-#define VP_KEEP_AFFINE_3D(m) do {\
- MWX(m)=MWY(m)=MWZ(m)=(VP_PAR)0.0; MWW(m)=(VP_PAR)1.0; \
- (m).type = VP_MOTION_PROJ_3D; } while (0)
-
-/* Initialize without altering the 2d affine & translation components */
-#define VP_KEEP_AFFINE_2D(m) do {\
- VP_KEEP_AFFINE_3D(m); \
- MXZ(m)=MYZ(m)=(VP_PAR)0.0; MZZ(m)=(VP_PAR)1.0; \
- MZX(m)=MZY(m)=MZW(m)=(VP_PAR)0.0; \
- (m).type = VP_MOTION_AFFINE; } while (0)
-
-/* Initialize without altering the 2d projective parameters */
-#define VP_KEEP_PROJECTIVE_2D(m) do {\
- MXZ(m)=MYZ(m)=(VP_PAR)0.0; MZZ(m)=(VP_PAR)1.0; \
- MZX(m)=MZY(m)=MZW(m)=MWZ(m)=(VP_PAR)0.0; \
- (m).type = VP_MOTION_PROJECTIVE; } while (0)
-
-/* Warp a 2d point (assuming the z component is zero) */
-#define VP_WARP_POINT_2D(inx,iny,m,outx,outy) do {\
- VP_PAR vpTmpWarpPnt___= MWX(m)*(inx)+MWY(m)*(iny)+MWW(m); \
- outx = (MXX(m)*((VP_PAR)inx)+MXY(m)*((VP_PAR)iny)+MXW(m))/vpTmpWarpPnt___; \
- outy = (MYX(m)*((VP_PAR)inx)+MYY(m)*((VP_PAR)iny)+MYW(m))/vpTmpWarpPnt___; } while (0)
-
-/* Warp a 3d point */
-#define VP_WARP_POINT_3D(inx,iny,inz,m,outx,outy,outz) do {\
- VP_PAR vpTmpWarpPnt___= MWX(m)*(inx)+MWY(m)*(iny)+MWZ(m)*((VP_PAR)inz)+MWW(m); \
- outx = (MXX(m)*((VP_PAR)inx)+MXY(m)*((VP_PAR)iny)+MXZ(m)*((VP_PAR)inz)+MXW(m))/vpTmpWarpPnt___; \
- outy = (MYX(m)*((VP_PAR)inx)+MYY(m)*((VP_PAR)iny)+MYZ(m)*((VP_PAR)inz)+MYW(m))/vpTmpWarpPnt___; \
- outz = MZX(m)*((VP_PAR)inx)+MZY(m)*((VP_PAR)iny)+MZZ(m)*((VP_PAR)inz)+MZW(m); \
- if ((m).type==VP_MOTION_PROJ_3D) outz/=vpTmpWarpPnt___; } while (0)
-
-/* Projections of each component */
-#define VP_PROJW_3D(m,x,y,z,f) ( MWX(m)*(x)+MWY(m)*(y)+MWZ(m)*(z)+MWW(m) )
-#define VP_PROJX_3D(m,x,y,z,f,w) ((MXX(m)*(x)+MXY(m)*(y)+MXZ(m)*(z)+MXW(m))/(w))
-#define VP_PROJY_3D(m,x,y,z,f,w) ((MYX(m)*(x)+MYY(m)*(y)+MYZ(m)*(z)+MYW(m))/(w))
-#define VP_PROJZ_3D(m,x,y,z,f,w) ((MZX(m)*(x)+MZY(m)*(y)+MZZ(m)*(z)+MZW(m))/(w))
-
-/* Scale Down a matrix by Sfactor */
-#define VP_SCALEDOWN(m,Sfactor) do { \
- MXW(m) /= (VP_PAR)Sfactor; MWX(m) *= (VP_PAR)Sfactor; \
- MYW(m) /= (VP_PAR)Sfactor; MWY(m) *= (VP_PAR)Sfactor; \
- MZW(m) /= (VP_PAR)Sfactor; MWZ(m) *= (VP_PAR)Sfactor; } while (0)
-
-/* Scale Up a matrix by Sfactor */
-#define VP_SCALEUP(m,Sfactor) do { \
- MXW(m) *= (VP_PAR)Sfactor; MWX(m) /= (VP_PAR)Sfactor; \
- MYW(m) *= (VP_PAR)Sfactor; MWY(m) /= (VP_PAR)Sfactor; \
- MZW(m) *= (VP_PAR)Sfactor; MWZ(m) /= (VP_PAR)Sfactor; } while (0)
-
-/* Normalize the transformation matrix so that MWW is 1 */
-#define VP_NORMALIZE(m) if (MWW(m)!=(VP_PAR)0.0) do { \
- MXX(m)/=MWW(m); MXY(m)/=MWW(m); MXZ(m)/=MWW(m); MXW(m)/= MWW(m); \
- MYX(m)/=MWW(m); MYY(m)/=MWW(m); MYZ(m)/=MWW(m); MYW(m)/= MWW(m); \
- MZX(m)/=MWW(m); MZY(m)/=MWW(m); MZZ(m)/=MWW(m); MZW(m)/= MWW(m); \
- MWX(m)/=MWW(m); MWY(m)/=MWW(m); MWZ(m)/=MWW(m); MWW(m) = (VP_PAR)1.0; } while (0)
-
-#define VP_PRINT_TRANS(msg,b) do { \
- fprintf(stderr, \
- "%s\n%f %f %f %f\n%f %f %f %f\n%f %f %f %f\n%f %f %f %f\n", \
- msg, \
- MXX(b),MXY(b),MXZ(b),MXW(b), \
- MYX(b),MYY(b),MYZ(b),MYW(b), \
- MZX(b),MZY(b),MZZ(b),MZW(b), \
- MWX(b),MWY(b),MWZ(b),MWW(b)); \
-} while (0)
-
-/* w' projection given a point x,y,0,f */
-#define VP_PROJZ(m,x,y,f) ( \
- MWX(m)*((VP_PAR)x)+MWY(m)*((VP_PAR)y)+MWW(m)*((VP_PAR)f))
-
-/* X Projection given a point x,y,0,f and w' */
-#define VP_PROJX(m,x,y,w,f) (\
- (MXX(m)*((VP_PAR)x)+MXY(m)*((VP_PAR)y)+MXW(m)*((VP_PAR)f))/((VP_PAR)w))
-
-/* Y Projection given a point x,y,0,f and the w' */
-#define VP_PROJY(m,x,y,w,f) (\
- (MYX(m)*((VP_PAR)x)+MYY(m)*((VP_PAR)y)+MYW(m)*((VP_PAR)f))/((VP_PAR)w))
-
-/* Set the reference id for a motion */
-#define VP_SET_REFID(m,id) do { (m).refid=id; } while (0)
-
-/* Set the inspection id for a motion */
-#define VP_SET_INSID(m,id) do { (m).insid=id; } while (0)
-
-void vp_copy_motion (const VP_MOTION *src, VP_MOTION *dst);
-int vp_invert_motion(const VP_MOTION* in,VP_MOTION* out);
-int vp_cascade_motion(const VP_MOTION* InAB, const VP_MOTION* InBC,VP_MOTION* OutAC);
-int vp_zoom_motion2d(VP_MOTION* in, VP_MOTION* out,
- int n, int w, int h, double zoom);
-double vp_motion_cornerdiff(const VP_MOTION *mot_a, const VP_MOTION *mot_b,
- int xo, int yo, int w, int h);
-
-#endif /* VP_MOTIONMODEL_H */
-/* =================================================================== */
-/* end vp_motionmodel.h */
diff --git a/jni_mosaic/feature_stab/src/dbregtest/PgmImage.cpp b/jni_mosaic/feature_stab/src/dbregtest/PgmImage.cpp
deleted file mode 100644
index 0891cfda6..000000000
--- a/jni_mosaic/feature_stab/src/dbregtest/PgmImage.cpp
+++ /dev/null
@@ -1,260 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "PgmImage.h"
-#include <cassert>
-
-using namespace std;
-
-PgmImage::PgmImage(std::string filename) :
-m_w(0),m_h(0),m_colors(255),m_format(PGM_BINARY_GRAYMAP),m_over_allocation(256)
-{
- if ( !ReadPGM(filename) )
- return;
-}
-
-PgmImage::PgmImage(int w, int h, int format) :
-m_colors(255),m_w(w),m_h(h),m_format(format),m_over_allocation(256)
-{
- SetFormat(format);
-}
-
-PgmImage::PgmImage(unsigned char *data, int w, int h) :
-m_colors(255),m_w(w),m_h(h),m_format(PGM_BINARY_GRAYMAP),m_over_allocation(256)
-{
- SetData(data);
-}
-
-PgmImage::PgmImage(std::vector<unsigned char> &data, int w, int h) :
-m_colors(255),m_w(w),m_h(h),m_format(PGM_BINARY_GRAYMAP),m_over_allocation(256)
-{
- if ( data.size() == w*h )
- SetData(&data[0]);
- else
- //throw (std::exception("Size of data is not w*h."));
- throw (std::exception());
-}
-
-PgmImage::PgmImage(const PgmImage &im) :
-m_colors(255),m_w(0),m_h(0),m_format(PGM_BINARY_GRAYMAP),m_over_allocation(256)
-{
- DeepCopy(im, *this);
-}
-
-PgmImage& PgmImage::operator= (const PgmImage &im)
-{
- if (this == &im) return *this;
- DeepCopy(im, *this);
- return *this;
-}
-
-void PgmImage::DeepCopy(const PgmImage& src, PgmImage& dst)
-{
- dst.m_data = src.m_data;
-
- // PGM data
- dst.m_w = src.m_w;
- dst.m_h = src.m_h;
- dst.m_format = src.m_format;
- dst.m_colors = src.m_colors;
-
- dst.m_comment = src.m_comment;
- SetupRowPointers();
-}
-
-PgmImage::~PgmImage()
-{
-
-}
-
-void PgmImage::SetFormat(int format)
-{
- m_format = format;
-
- switch (format)
- {
- case PGM_BINARY_GRAYMAP:
- m_data.resize(m_w*m_h+m_over_allocation);
- break;
- case PGM_BINARY_PIXMAP:
- m_data.resize(m_w*m_h*3+m_over_allocation);
- break;
- default:
- return;
- break;
- }
- SetupRowPointers();
-}
-
-void PgmImage::SetData(const unsigned char * data)
-{
- m_data.resize(m_w*m_h+m_over_allocation);
- memcpy(&m_data[0],data,m_w*m_h);
- SetupRowPointers();
-}
-
-bool PgmImage::ReadPGM(const std::string filename)
-{
- ifstream in(filename.c_str(),std::ios::in | std::ios::binary);
- if ( !in.is_open() )
- return false;
-
- // read the header:
- string format_header,size_header,colors_header;
-
- getline(in,format_header);
- stringstream s;
- s << format_header;
-
- s >> format_header >> m_w >> m_h >> m_colors;
- s.clear();
-
- if ( m_w == 0 )
- {
- while ( in.peek() == '#' )
- getline(in,m_comment);
-
- getline(in,size_header);
-
- while ( in.peek() == '#' )
- getline(in,m_comment);
-
- m_colors = 0;
-
- // parse header
- s << size_header;
- s >> m_w >> m_h >> m_colors;
- s.clear();
-
- if ( m_colors == 0 )
- {
- getline(in,colors_header);
- s << colors_header;
- s >> m_colors;
- }
- }
-
- if ( format_header == "P5" )
- m_format = PGM_BINARY_GRAYMAP;
- else if (format_header == "P6" )
- m_format = PGM_BINARY_PIXMAP;
- else
- m_format = PGM_FORMAT_INVALID;
-
- switch(m_format)
- {
- case(PGM_BINARY_GRAYMAP):
- m_data.resize(m_w*m_h+m_over_allocation);
- in.read((char *)(&m_data[0]),m_data.size());
- break;
- case(PGM_BINARY_PIXMAP):
- m_data.resize(m_w*m_h*3+m_over_allocation);
- in.read((char *)(&m_data[0]),m_data.size());
- break;
- default:
- return false;
- break;
- }
- in.close();
-
- SetupRowPointers();
-
- return true;
-}
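
For reference, the header layout this parser accepts (the bytes shown are illustrative, not taken from the original source):

    P5
    # optional comment line(s)
    640 480
    255
    <640*480 bytes of binary gray values>

'P5' selects PGM_BINARY_GRAYMAP; 'P6' selects PGM_BINARY_PIXMAP, in which case 3*width*height bytes of interleaved RGB follow. The first branch above also accepts the variant where the width, height and maximum value share the 'P5'/'P6' line.
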
-
-bool PgmImage::WritePGM(const std::string filename, const std::string comment)
-{
- string format_header;
-
- switch(m_format)
- {
- case PGM_BINARY_GRAYMAP:
- format_header = "P5\n";
- break;
- case PGM_BINARY_PIXMAP:
- format_header = "P6\n";
- break;
- default:
- return false;
- break;
- }
-
- ofstream out(filename.c_str(),std::ios::out |ios::binary);
- out << format_header << "# " << comment << '\n' << m_w << " " << m_h << '\n' << m_colors << '\n';
-
- out.write((char *)(&m_data[0]), m_data.size());
-
- out.close();
-
- return true;
-}
-
-void PgmImage::SetupRowPointers()
-{
- int i;
- m_rows.resize(m_h);
-
- switch (m_format)
- {
- case PGM_BINARY_GRAYMAP:
- for(i=0;i<m_h;i++)
- {
- m_rows[i]=&m_data[m_w*i];
- }
- break;
- case PGM_BINARY_PIXMAP:
- for(i=0;i<m_h;i++)
- {
- m_rows[i]=&m_data[(m_w*3)*i];
- }
- break;
- }
-}
-
-void PgmImage::ConvertToGray()
-{
- if ( m_format != PGM_BINARY_PIXMAP ) return;
-
- // Y = 0.3*R + 0.59*G + 0.11*B;
- for ( int i = 0; i < m_w*m_h; ++i )
- m_data[i] = (unsigned char)(0.3*m_data[3*i]+0.59*m_data[3*i+1]+0.11*m_data[3*i+2]);
-
- m_data.resize(m_w*m_h+m_over_allocation);
- m_format = PGM_BINARY_GRAYMAP;
-
- SetupRowPointers();
-}
-
-std::ostream& operator<< (std::ostream& o, const PgmImage& im)
-{
- o << "PGM Image Info:\n";
- o << "Size: " << im.m_w << " x " << im.m_h << "\n";
- o << "Comment: " << im.m_comment << "\n";
- switch (im.m_format)
- {
- case PgmImage::PGM_BINARY_PIXMAP:
- o << "Format: RGB binary pixmap";
- break;
- case PgmImage::PGM_BINARY_GRAYMAP:
- o << "Format: PGM binary graymap";
- break;
- default:
- o << "Format: Invalid";
- break;
- }
- o << endl;
- return o;
-}
diff --git a/jni_mosaic/feature_stab/src/dbregtest/PgmImage.h b/jni_mosaic/feature_stab/src/dbregtest/PgmImage.h
deleted file mode 100644
index d4d1eebed..000000000
--- a/jni_mosaic/feature_stab/src/dbregtest/PgmImage.h
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-#include <vector>
-#include <iostream>
-#include <fstream>
-#include <sstream>
-#include <memory.h>
-
-/*!
- * Simple class to manipulate PGM/PPM images. Not suitable for heavy lifting.
- */
-class PgmImage
-{
- friend std::ostream& operator<< (std::ostream& o, const PgmImage& im);
-public:
- enum {PGM_BINARY_GRAYMAP,PGM_BINARY_PIXMAP,PGM_FORMAT_INVALID};
- /*!
- * Constructor from a PGM file name.
- */
- PgmImage(std::string filename);
- /*!
- * Constructor to allocate an image of given size and type.
- */
- PgmImage(int w, int h, int format = PGM_BINARY_GRAYMAP);
- /*!
- * Constructor to allocate an image of given size and copy the data in.
- */
- PgmImage(unsigned char *data, int w, int h);
- /*!
- * Constructor to allocate an image of given size and copy the data in.
- */
- PgmImage(std::vector<unsigned char> &data, int w, int h);
-
- PgmImage(const PgmImage &im);
-
- PgmImage& operator= (const PgmImage &im);
- ~PgmImage();
-
- int GetHeight() const { return m_h; }
- int GetWidth() const { return m_w; }
-
- //! Copy pixels from data pointer
- void SetData(const unsigned char * data);
-
- //! Get a data pointer to unaligned memory area
- unsigned char * GetDataPointer() { if ( m_data.size() > 0 ) return &m_data[0]; else return NULL; }
- unsigned char ** GetRowPointers() { if ( m_rows.size() == m_h ) return &m_rows[0]; else return NULL; }
-
- //! Read a PGM file from disk
- bool ReadPGM(const std::string filename);
- //! Write a PGM file to disk
- bool WritePGM(const std::string filename, const std::string comment="");
-
- //! Get image format (returns PGM_BINARY_GRAYMAP, PGM_BINARY_PIXMAP or PGM_FORMAT_INVALID)
- int GetFormat() const { return m_format; }
-
- //! Set image format (PGM_BINARY_GRAYMAP or PGM_BINARY_PIXMAP). Image data becomes invalid.
- void SetFormat(int format);
-
- //! If the image is PGM_BINARY_PIXMAP, convert it to PGM_BINARY_GRAYMAP via Y = 0.3*R + 0.59*G + 0.11*B.
- void ConvertToGray();
-protected:
- // Generic functions:
- void DeepCopy(const PgmImage& src, PgmImage& dst);
- void SetupRowPointers();
-
- // PGM data
- int m_w;
- int m_h;
- int m_format;
- int m_colors;
- int m_over_allocation;
- std::vector<unsigned char> m_data;
- std::string m_comment;
-
- std::vector<unsigned char *> m_rows;
-};
-
-std::ostream& operator<< (std::ostream& o, const PgmImage& im);
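
A minimal usage sketch of the class above (file names are assumptions chosen for illustration):

    PgmImage img("frame0000.pgm");                 // load a PGM/PPM from disk
    if (img.GetDataPointer() != NULL) {
        if (img.GetFormat() == PgmImage::PGM_BINARY_PIXMAP)
            img.ConvertToGray();                   // collapse RGB to luminance
        unsigned char **rows = img.GetRowPointers();
        // ... hand 'rows' to the registration code ...
        img.WritePGM("frame0000_gray.pgm", "grayscale copy");
    }
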
diff --git a/jni_mosaic/feature_stab/src/dbregtest/dbregtest.cpp b/jni_mosaic/feature_stab/src/dbregtest/dbregtest.cpp
deleted file mode 100644
index 508736218..000000000
--- a/jni_mosaic/feature_stab/src/dbregtest/dbregtest.cpp
+++ /dev/null
@@ -1,399 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// $Id: dbregtest.cpp,v 1.24 2011/06/17 14:04:33 mbansal Exp $
-#include "stdafx.h"
-#include "PgmImage.h"
-#include "../dbreg/dbreg.h"
-#include "../dbreg/dbstabsmooth.h"
-#include <db_utilities_camera.h>
-
-#include <iostream>
-#include <iomanip>
-
-#if PROFILE
- #include <sys/time.h>
-#endif
-
-
-using namespace std;
-
-const int DEFAULT_NR_CORNERS=500;
-const double DEFAULT_MAX_DISPARITY=0.2;
-const int DEFAULT_MOTION_MODEL=DB_HOMOGRAPHY_TYPE_AFFINE;
-//const int DEFAULT_MOTION_MODEL=DB_HOMOGRAPHY_TYPE_R_T;
-//const int DEFAULT_MOTION_MODEL=DB_HOMOGRAPHY_TYPE_TRANSLATION;
-const bool DEFAULT_QUARTER_RESOLUTION=false;
-const unsigned int DEFAULT_REFERENCE_UPDATE_PERIOD=3;
-const bool DEFAULT_DO_MOTION_SMOOTHING = false;
-const double DEFAULT_MOTION_SMOOTHING_GAIN = 0.75;
-const bool DEFAULT_LINEAR_POLISH = false;
-const int DEFAULT_MAX_ITERATIONS = 10;
-
-void usage(string name) {
-
- const char *helpmsg[] = {
- "Function: point-based frame to reference registration.",
- " -m [rt,a,p] : motion model, rt = rotation+translation, a = affine, p = projective (default = affine).",
- " -c <int> : number of corners (default 500).",
- " -d <double>: search disparity as portion of image size (default 0.2).",
- " -q : quarter the image resolution (i.e. half of each dimension) (default off)",
- " -r <int> : the period (in nr of frames) for reference frame updates (default = 3)",
- " -s <0/1> : motion smoothing (1 activates motion smoothing, 0 turns it off - default = 0)",
- " -g <double>: motion smoothing gain, only used if smoothing is on (default = 0.75)",
- NULL
- };
-
- cerr << "Usage: " << name << " [options] image_list.txt" << endl;
-
- const char **p = helpmsg;
-
- while (*p)
- {
- cerr << *p++ << endl;
- }
-}
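
For example, a typical invocation (the list file name is an assumption) would be

    dbregtest -m a -c 500 -d 0.2 -q -s 1 -g 0.75 image_list.txt

where image_list.txt names one PGM/PPM frame per line; each aligned result is written next to its input as aligned_<name>.
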
-
-void parse_cmd_line(stringstream& cmdline,
- const int argc,
- const string& progname,
- string& image_list_file_name,
- int& nr_corners,
- double& max_disparity,
- int& motion_model_type,
- bool& quarter_resolution,
- unsigned int& reference_update_period,
- bool& do_motion_smoothing,
- double& motion_smoothing_gain
- );
-
-int main(int argc, char* argv[])
-{
- int nr_corners = DEFAULT_NR_CORNERS;
- double max_disparity = DEFAULT_MAX_DISPARITY;
- int motion_model_type = DEFAULT_MOTION_MODEL;
- bool quarter_resolution = DEFAULT_QUARTER_RESOLUTION;
-
- unsigned int reference_update_period = DEFAULT_REFERENCE_UPDATE_PERIOD;
-
- bool do_motion_smoothing = DEFAULT_DO_MOTION_SMOOTHING;
- double motion_smoothing_gain = DEFAULT_MOTION_SMOOTHING_GAIN;
- const bool DEFAULT_USE_SMALLER_MATCHING_WINDOW = true;
-
- int default_nr_samples = DB_DEFAULT_NR_SAMPLES/5;
-
- bool use_smaller_matching_window = DEFAULT_USE_SMALLER_MATCHING_WINDOW;
-
-
- bool linear_polish = DEFAULT_LINEAR_POLISH;
-
- if (argc < 2) {
- usage(argv[0]);
- exit(1);
- }
-
- stringstream cmdline;
- string progname(argv[0]);
- string image_list_file_name;
-
-#if PROFILE
- timeval ts1, ts2, ts3, ts4;
-#endif
-
- // put the options and image list file name into the cmdline stringstream
- for (int c = 1; c < argc; c++)
- {
- cmdline << argv[c] << " ";
- }
-
- parse_cmd_line(cmdline, argc, progname, image_list_file_name, nr_corners, max_disparity, motion_model_type,quarter_resolution,reference_update_period,do_motion_smoothing,motion_smoothing_gain);
-
- ifstream in(image_list_file_name.c_str(),ios::in);
-
- if ( !in.is_open() )
- {
- cerr << "Could not open file " << image_list_file_name << ". Exiting" << endl;
-
- return -1;
- }
-
- // feature-based image registration class:
- db_FrameToReferenceRegistration reg;
-// db_StabilizationSmoother stab_smoother;
-
- // input file name:
- string file_name;
-
- // look-up tables for image warping:
- float ** lut_x = NULL, **lut_y = NULL;
-
- // if the images are color, the input is saved in color_ref:
- PgmImage color_ref(0,0);
-
- // image width, height:
- int w,h;
-
- int frame_number = 0;
-
- while ( !in.eof() )
- {
- getline(in,file_name);
-
- PgmImage ref(file_name);
-
- if ( ref.GetDataPointer() == NULL )
- {
- cerr << "Could not open image " << file_name << ". Exiting." << endl;
- return -1;
- }
-
- cout << ref << endl;
-
- // color format:
- int format = ref.GetFormat();
-
- // is the input image color?:
- bool color = format == PgmImage::PGM_BINARY_PIXMAP;
-
- w = ref.GetWidth();
- h = ref.GetHeight();
-
- if ( !reg.Initialized() )
- {
- reg.Init(w,h,motion_model_type,DEFAULT_MAX_ITERATIONS,linear_polish,quarter_resolution,DB_POINT_STANDARDDEV,reference_update_period,do_motion_smoothing,motion_smoothing_gain,default_nr_samples,DB_DEFAULT_CHUNK_SIZE,nr_corners,max_disparity,use_smaller_matching_window);
- lut_x = db_AllocImage_f(w,h);
- lut_y = db_AllocImage_f(w,h);
-
- }
-
- if ( color )
- {
- // save the color image:
- color_ref = ref;
- }
-
- // make a grayscale image:
- ref.ConvertToGray();
-
- // compute the homography:
- double H[9],Hinv[9];
- db_Identity3x3(Hinv);
- db_Identity3x3(H);
-
- bool force_reference = false;
-
-#if PROFILE
- gettimeofday(&ts1, NULL);
-#endif
-
- reg.AddFrame(ref.GetRowPointers(),H,false,false);
- cout << reg.profile_string << std::endl;
-
-#if PROFILE
- gettimeofday(&ts2, NULL);
-
- double elapsedTime = (ts2.tv_sec - ts1.tv_sec)*1000.0; // sec to ms
- elapsedTime += (ts2.tv_usec - ts1.tv_usec)/1000.0; // us to ms
- cout <<"\nelapsedTime for Reg<< "<<elapsedTime<<" ms >>>>>>>>>>>>>\n";
-#endif
-
- if (frame_number == 0)
- {
- reg.UpdateReference(ref.GetRowPointers());
- }
-
-
- //std::vector<int> &inlier_indices = reg.GetInliers();
- int *inlier_indices = reg.GetInliers();
- int num_inlier_indices = reg.GetNrInliers();
- printf("[%d] #Inliers = %d\n",frame_number,num_inlier_indices);
-
- reg.Get_H_dref_to_ins(H);
-
- db_GenerateHomographyLut(lut_x,lut_y,w,h,H);
-
- // create a new image and warp:
- PgmImage warped(w,h,format);
-
-#if PROFILE
- gettimeofday(&ts3, NULL);
-#endif
-
- if ( color )
- db_WarpImageLutBilinear_rgb(color_ref.GetRowPointers(),warped.GetRowPointers(),w,h,lut_x,lut_y);
- else
- db_WarpImageLut_u(ref.GetRowPointers(),warped.GetRowPointers(),w,h,lut_x,lut_y,DB_WARP_FAST);
-
-#if PROFILE
- gettimeofday(&ts4, NULL);
- elapsedTime = (ts4.tv_sec - ts3.tv_sec)*1000.0; // sec to ms
- elapsedTime += (ts4.tv_usec - ts3.tv_usec)/1000.0; // us to ms
- cout <<"\nelapsedTime for Warp <<"<<elapsedTime<<" ms >>>>>>>>>>>>>\n";
-#endif
-
- // write aligned image: name is aligned_<corresponding input file name>
- stringstream s;
- s << "aligned_" << file_name;
- warped.WritePGM(s.str());
-
- /*
- // Get the reference and inspection corners to write to file
- double *ref_corners = reg.GetRefCorners();
- double *ins_corners = reg.GetInsCorners();
-
- // get the image file name (without extension), so we
- // can generate the corresponding filenames for matches
- // and inliers
- string file_name_root(file_name.substr(0,file_name.rfind(".")));
-
- // write matches to file
- s.str(string(""));
- s << "Matches_" << file_name_root << ".txt";
-
- ofstream match_file(s.str().c_str());
-
- for (int i = 0; i < reg.GetNrMatches(); i++)
- {
- match_file << ref_corners[3*i] << " " << ref_corners[3*i+1] << " " << ins_corners[3*i] << " " << ins_corners[3*i+1] << endl;
- }
-
- match_file.close();
-
- // write the inlier matches to file
- s.str(string(""));
- s << "InlierMatches_" << file_name_root << ".txt";
-
- ofstream inlier_match_file(s.str().c_str());
-
- for(int i=0; i<num_inlier_indices; i++)
- {
- int k = inlier_indices[i];
- inlier_match_file << ref_corners[3*k] << " "
- << ref_corners[3*k+1] << " "
- << ins_corners[3*k] << " "
- << ins_corners[3*k+1] << endl;
- }
- inlier_match_file.close();
- */
-
- frame_number++;
- }
-
- if ( reg.Initialized() )
- {
- db_FreeImage_f(lut_x,h);
- db_FreeImage_f(lut_y,h);
- }
-
- return 0;
-}
-
-void parse_cmd_line(stringstream& cmdline,
- const int argc,
- const string& progname,
- string& image_list_file_name,
- int& nr_corners,
- double& max_disparity,
- int& motion_model_type,
- bool& quarter_resolution,
- unsigned int& reference_update_period,
- bool& do_motion_smoothing,
- double& motion_smoothing_gain)
-{
- // for counting down the parsed arguments.
- int c = argc;
-
- // a holder
- string token;
-
- while (cmdline >> token)
- {
- --c;
-
- int pos = token.find("-");
-
- if (pos == 0)
- {
- switch (token[1])
- {
- case 'm':
- --c; cmdline >> token;
- if (token.compare("rt") == 0)
- {
- motion_model_type = DB_HOMOGRAPHY_TYPE_R_T;
- }
- else if (token.compare("a") == 0)
- {
- motion_model_type = DB_HOMOGRAPHY_TYPE_AFFINE;
- }
- else if (token.compare("p") == 0)
- {
- motion_model_type = DB_HOMOGRAPHY_TYPE_PROJECTIVE;
- }
- else
- {
- usage(progname);
- exit(1);
- }
- break;
- case 'c':
- --c; cmdline >> nr_corners;
- break;
- case 'd':
- --c; cmdline >> max_disparity;
- break;
- case 'q':
- quarter_resolution = true;
- break;
- case 'r':
- --c; cmdline >> reference_update_period;
- break;
- case 's':
- --c; cmdline >> do_motion_smoothing;
- break;
- case 'g':
- --c; cmdline >> motion_smoothing_gain;
- break;
- default:
- cerr << progname << ": illegal option " << token << endl;
- case 'h':
- usage(progname);
- exit(1);
- break;
- }
- }
- else
- {
- if (c != 1)
- {
- usage(progname);
- exit(1);
- }
- else
- {
- --c;
- image_list_file_name = token;
- }
- }
- }
-
- if (c != 0)
- {
- usage(progname);
- exit(1);
- }
-}
-
diff --git a/jni_mosaic/feature_stab/src/dbregtest/stdafx.cpp b/jni_mosaic/feature_stab/src/dbregtest/stdafx.cpp
deleted file mode 100644
index 0c703e2dc..000000000
--- a/jni_mosaic/feature_stab/src/dbregtest/stdafx.cpp
+++ /dev/null
@@ -1,24 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// stdafx.cpp : source file that includes just the standard includes
-// dbregtest.pch will be the pre-compiled header
-// stdafx.obj will contain the pre-compiled type information
-
-#include "stdafx.h"
-
-// TODO: reference any additional headers you need in STDAFX.H
-// and not in this file
diff --git a/jni_mosaic/feature_stab/src/dbregtest/stdafx.h b/jni_mosaic/feature_stab/src/dbregtest/stdafx.h
deleted file mode 100644
index 9bc06ea04..000000000
--- a/jni_mosaic/feature_stab/src/dbregtest/stdafx.h
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-// stdafx.h : include file for standard system include files,
-// or project specific include files that are used frequently, but
-// are changed infrequently
-//
-
-#pragma once
-
-#include "targetver.h"
-
-#include <stdio.h>
-
-// TODO: reference additional headers your program requires here
diff --git a/jni_mosaic/feature_stab/src/dbregtest/targetver.h b/jni_mosaic/feature_stab/src/dbregtest/targetver.h
deleted file mode 100644
index 9272b0d6e..000000000
--- a/jni_mosaic/feature_stab/src/dbregtest/targetver.h
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#pragma once
-
-// The following macros define the minimum required platform. The minimum required platform
-// is the earliest version of Windows, Internet Explorer etc. that has the necessary features to run
-// your application. The macros work by enabling all features available on platform versions up to and
-// including the version specified.
-
-// Modify the following defines if you have to target a platform prior to the ones specified below.
-// Refer to MSDN for the latest info on corresponding values for different platforms.
-#ifndef _WIN32_WINNT // Specifies that the minimum required platform is Windows Vista.
-#define _WIN32_WINNT 0x0600 // Change this to the appropriate value to target other versions of Windows.
-#endif
-
diff --git a/src/com/android/camera/CameraActivity.java b/src/com/android/camera/CameraActivity.java
index 5ba769a62..c5cbb7e8a 100644
--- a/src/com/android/camera/CameraActivity.java
+++ b/src/com/android/camera/CameraActivity.java
@@ -16,146 +16,80 @@
package com.android.camera;
-import android.animation.Animator;
-import android.animation.AnimatorListenerAdapter;
-import android.animation.ObjectAnimator;
+import android.app.Activity;
+import android.content.BroadcastReceiver;
import android.content.ComponentName;
+import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
+import android.content.IntentFilter;
import android.content.ServiceConnection;
import android.content.pm.ActivityInfo;
import android.content.res.Configuration;
-import android.graphics.drawable.Drawable;
+import android.graphics.drawable.ColorDrawable;
+import android.net.Uri;
import android.os.Bundle;
+import android.os.Handler;
import android.os.IBinder;
-import android.provider.MediaStore;
import android.provider.Settings;
import android.view.KeyEvent;
+import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.OrientationEventListener;
import android.view.View;
+import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
-import android.widget.FrameLayout;
+import android.widget.ImageView;
-import com.android.camera.ui.CameraSwitcher;
+import com.android.camera.data.CameraDataAdapter;
+import com.android.camera.data.LocalData;
+import com.android.camera.ui.CameraSwitcher.CameraSwitchListener;
+import com.android.camera.ui.FilmStripView;
import com.android.gallery3d.R;
-import com.android.gallery3d.app.PhotoPage;
import com.android.gallery3d.common.ApiHelper;
import com.android.gallery3d.util.LightCycleHelper;
-public class CameraActivity extends ActivityBase
- implements CameraSwitcher.CameraSwitchListener {
+public class CameraActivity extends Activity
+ implements CameraSwitchListener {
+
+ private static final String TAG = "CAM_Activity";
+
public static final int PHOTO_MODULE_INDEX = 0;
public static final int VIDEO_MODULE_INDEX = 1;
public static final int PANORAMA_MODULE_INDEX = 2;
public static final int LIGHTCYCLE_MODULE_INDEX = 3;
- CameraModule mCurrentModule;
- private FrameLayout mFrame;
- private ShutterButton mShutter;
- private CameraSwitcher mSwitcher;
- private View mCameraControls;
- private View mControlsBackground;
- private View mPieMenuButton;
- private Drawable[] mDrawables;
+ private static final String INTENT_ACTION_STILL_IMAGE_CAMERA_SECURE =
+ "android.media.action.STILL_IMAGE_CAMERA_SECURE";
+ public static final String ACTION_IMAGE_CAPTURE_SECURE =
+ "android.media.action.IMAGE_CAPTURE_SECURE";
+
+ // The intent extra for camera from secure lock screen. True if the gallery
+ // should only show newly captured pictures. sSecureAlbumId does not
+ // increment. This is used when switching between camera, camcorder, and
+ // panorama. If the extra is not set, it is in the normal camera mode.
+ public static final String SECURE_CAMERA_EXTRA = "secure_camera";
+
+ private CameraDataAdapter mDataAdapter;
private int mCurrentModuleIndex;
- private MotionEvent mDown;
+ private CameraModule mCurrentModule;
+ private View mRootView;
+ private FilmStripView mFilmStripView;
+ private int mResultCodeForTesting;
+ private Intent mResultDataForTesting;
+ private OnScreenHint mStorageHint;
+ private long mStorageSpace = Storage.LOW_STORAGE_THRESHOLD;
+ private PhotoModule mController;
private boolean mAutoRotateScreen;
- private int mHeightOrWidth = -1;
-
+ private boolean mSecureCamera;
+ private boolean mShowCameraPreview;
+ private int mLastRawOrientation;
private MyOrientationEventListener mOrientationListener;
- // The degrees of the device rotated clockwise from its natural orientation.
- private int mLastRawOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
-
- private MediaSaveService mMediaSaveService;
- private ServiceConnection mConnection = new ServiceConnection() {
- @Override
- public void onServiceConnected(ComponentName className, IBinder b) {
- mMediaSaveService = ((MediaSaveService.LocalBinder) b).getService();
- mCurrentModule.onMediaSaveServiceConnected(mMediaSaveService);
- }
- @Override
- public void onServiceDisconnected(ComponentName className) {
- mMediaSaveService = null;
- }};
-
- private static final String TAG = "CAM_activity";
-
- private static final int[] DRAW_IDS = {
- R.drawable.ic_switch_camera,
- R.drawable.ic_switch_video,
- R.drawable.ic_switch_pan,
- R.drawable.ic_switch_photosphere
- };
-
- @Override
- public void onCreate(Bundle state) {
- super.onCreate(state);
- setContentView(R.layout.camera_main);
- mFrame = (FrameLayout) findViewById(R.id.camera_app_root);
- mDrawables = new Drawable[DRAW_IDS.length];
- for (int i = 0; i < DRAW_IDS.length; i++) {
- mDrawables[i] = getResources().getDrawable(DRAW_IDS[i]);
- }
- init();
- if (MediaStore.INTENT_ACTION_VIDEO_CAMERA.equals(getIntent().getAction())
- || MediaStore.ACTION_VIDEO_CAPTURE.equals(getIntent().getAction())) {
- mCurrentModule = new VideoModule();
- mCurrentModuleIndex = VIDEO_MODULE_INDEX;
- } else {
- mCurrentModule = new PhotoModule();
- mCurrentModuleIndex = PHOTO_MODULE_INDEX;
- }
- mCurrentModule.init(this, mFrame, true);
- mSwitcher.setCurrentIndex(mCurrentModuleIndex);
- mOrientationListener = new MyOrientationEventListener(this);
- bindMediaSaveService();
- }
-
- public void init() {
- boolean landscape = Util.getDisplayRotation(this) % 180 == 90;
- mControlsBackground = findViewById(R.id.blocker);
- mCameraControls = findViewById(R.id.camera_controls);
- mShutter = (ShutterButton) findViewById(R.id.shutter_button);
- mSwitcher = (CameraSwitcher) findViewById(R.id.camera_switcher);
- mPieMenuButton = findViewById(R.id.menu);
- int totaldrawid = (LightCycleHelper.hasLightCycleCapture(this)
- ? DRAW_IDS.length : DRAW_IDS.length - 1);
- if (!ApiHelper.HAS_OLD_PANORAMA) totaldrawid--;
-
- int[] drawids = new int[totaldrawid];
- int[] moduleids = new int[totaldrawid];
- int ix = 0;
- for (int i = 0; i < mDrawables.length; i++) {
- if (i == PANORAMA_MODULE_INDEX && !ApiHelper.HAS_OLD_PANORAMA) {
- continue; // not enabled, so don't add to UI
- }
- if (i == LIGHTCYCLE_MODULE_INDEX && !LightCycleHelper.hasLightCycleCapture(this)) {
- continue; // not enabled, so don't add to UI
- }
- moduleids[ix] = i;
- drawids[ix++] = DRAW_IDS[i];
- }
- mSwitcher.setIds(moduleids, drawids);
- mSwitcher.setSwitchListener(this);
- mSwitcher.setCurrentIndex(mCurrentModuleIndex);
- }
-
- @Override
- public void onDestroy() {
- unbindMediaSaveService();
- super.onDestroy();
- }
-
- // Return whether the auto-rotate screen in system settings
- // is turned on.
- public boolean isAutoRotateScreen() {
- return mAutoRotateScreen;
- }
+ private Handler mMainHandler;
private class MyOrientationEventListener
- extends OrientationEventListener {
+ extends OrientationEventListener {
public MyOrientationEventListener(Context context) {
super(context);
}
@@ -171,155 +105,161 @@ public class CameraActivity extends ActivityBase
}
}
- private ObjectAnimator mCameraSwitchAnimator;
-
- @Override
- public void onCameraSelected(final int i) {
- if (mPaused) return;
- if (i != mCurrentModuleIndex) {
- mPaused = true;
- CameraScreenNail screenNail = getCameraScreenNail();
- if (screenNail != null) {
- if (mCameraSwitchAnimator != null && mCameraSwitchAnimator.isRunning()) {
- mCameraSwitchAnimator.cancel();
- }
- mCameraSwitchAnimator = ObjectAnimator.ofFloat(
- screenNail, "alpha", screenNail.getAlpha(), 0f);
- mCameraSwitchAnimator.addListener(new AnimatorListenerAdapter() {
- @Override
- public void onAnimationEnd(Animator animation) {
- super.onAnimationEnd(animation);
- doChangeCamera(i);
- }
- });
- mCameraSwitchAnimator.start();
- } else {
- doChangeCamera(i);
+ private MediaSaveService mMediaSaveService;
+ private ServiceConnection mConnection = new ServiceConnection() {
+ @Override
+ public void onServiceConnected(ComponentName className, IBinder b) {
+ mMediaSaveService = ((MediaSaveService.LocalBinder) b).getService();
+ mCurrentModule.onMediaSaveServiceConnected(mMediaSaveService);
}
- }
- }
-
- private void doChangeCamera(int i) {
- boolean canReuse = canReuseScreenNail();
- CameraHolder.instance().keep();
- closeModule(mCurrentModule);
- mCurrentModuleIndex = i;
- switch (i) {
- case VIDEO_MODULE_INDEX:
- mCurrentModule = new VideoModule();
- break;
- case PHOTO_MODULE_INDEX:
- mCurrentModule = new PhotoModule();
- break;
- case PANORAMA_MODULE_INDEX:
- mCurrentModule = new PanoramaModule();
- break;
- case LIGHTCYCLE_MODULE_INDEX:
- mCurrentModule = LightCycleHelper.createPanoramaModule();
- break;
- }
- showPieMenuButton(mCurrentModule.needsPieMenu());
-
- openModule(mCurrentModule, canReuse);
- mCurrentModule.onOrientationChanged(mLastRawOrientation);
- if (mMediaSaveService != null) {
- mCurrentModule.onMediaSaveServiceConnected(mMediaSaveService);
- }
- getCameraScreenNail().setAlpha(0f);
- getCameraScreenNail().setOnFrameDrawnOneShot(mOnFrameDrawn);
- }
-
- public void showPieMenuButton(boolean show) {
- if (show) {
- findViewById(R.id.blocker).setVisibility(View.VISIBLE);
- findViewById(R.id.menu).setVisibility(View.VISIBLE);
- findViewById(R.id.on_screen_indicators).setVisibility(View.VISIBLE);
- } else {
- findViewById(R.id.blocker).setVisibility(View.INVISIBLE);
- findViewById(R.id.menu).setVisibility(View.INVISIBLE);
- findViewById(R.id.on_screen_indicators).setVisibility(View.INVISIBLE);
- }
- }
-
- private Runnable mOnFrameDrawn = new Runnable() {
+ @Override
+ public void onServiceDisconnected(ComponentName className) {
+ mMediaSaveService = null;
+ }};
+ // close activity when screen turns off
+ private BroadcastReceiver mScreenOffReceiver = new BroadcastReceiver() {
@Override
- public void run() {
- runOnUiThread(mFadeInCameraScreenNail);
+ public void onReceive(Context context, Intent intent) {
+ finish();
}
};
- private Runnable mFadeInCameraScreenNail = new Runnable() {
+ private FilmStripView.Listener mFilmStripListener = new FilmStripView.Listener() {
+ @Override
+ public void onDataPromoted(int dataID) {
+ removeData(dataID);
+ }
- @Override
- public void run() {
- mCameraSwitchAnimator = ObjectAnimator.ofFloat(
- getCameraScreenNail(), "alpha", 0f, 1f);
- mCameraSwitchAnimator.setStartDelay(50);
- mCameraSwitchAnimator.start();
- }
- };
+ @Override
+ public void onDataDemoted(int dataID) {
+ removeData(dataID);
+ }
- @Override
- public void onShowSwitcherPopup() {
- mCurrentModule.onShowSwitcherPopup();
- }
+ @Override
+ public void onDataFullScreenChange(int dataID, boolean full) {
+ }
- private void openModule(CameraModule module, boolean canReuse) {
- module.init(this, mFrame, canReuse && canReuseScreenNail());
- mPaused = false;
- module.onResumeBeforeSuper();
- module.onResumeAfterSuper();
- }
+ @Override
+ public void onSwitchMode(boolean toCamera) {
+ mCurrentModule.onSwitchMode(toCamera);
+ }
+ };
- private void closeModule(CameraModule module) {
- module.onPauseBeforeSuper();
- module.onPauseAfterSuper();
- mFrame.removeAllViews();
+ private Runnable mDeletionRunnable = new Runnable() {
+ @Override
+ public void run() {
+ mDataAdapter.executeDeletion(CameraActivity.this);
+ }
+ };
+
+ public MediaSaveService getMediaSaveService() {
+ return mMediaSaveService;
}
- public ShutterButton getShutterButton() {
- return mShutter;
+ public void notifyNewMedia(Uri uri) {
+ ContentResolver cr = getContentResolver();
+ String mimeType = cr.getType(uri);
+ if (mimeType.startsWith("video/")) {
+ sendBroadcast(new Intent(Util.ACTION_NEW_VIDEO, uri));
+ mDataAdapter.addNewVideo(cr, uri);
+ } else if (mimeType.startsWith("image/")) {
+ Util.broadcastNewPicture(this, uri);
+ mDataAdapter.addNewPhoto(cr, uri);
+ } else {
+ android.util.Log.w(TAG, "Unknown new media with MIME type:"
+ + mimeType + ", uri:" + uri);
+ }
}
- public void hideUI() {
- mCameraControls.setVisibility(View.INVISIBLE);
- hideSwitcher();
- mShutter.setVisibility(View.GONE);
+ private void removeData(int dataID) {
+ mDataAdapter.removeData(CameraActivity.this, dataID);
+ mMainHandler.removeCallbacks(mDeletionRunnable);
+ mMainHandler.postDelayed(mDeletionRunnable, 3000);
}
- public void showUI() {
- mCameraControls.setVisibility(View.VISIBLE);
- showSwitcher();
- mShutter.setVisibility(View.VISIBLE);
- // Force a layout change to show shutter button
- mShutter.requestLayout();
+ private void bindMediaSaveService() {
+ Intent intent = new Intent(this, MediaSaveService.class);
+ startService(intent); // start service before binding it so the
+ // service won't be killed if we unbind it.
+ bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
}
- public void hideSwitcher() {
- mSwitcher.closePopup();
- mSwitcher.setVisibility(View.INVISIBLE);
+ private void unbindMediaSaveService() {
+ if (mMediaSaveService != null) {
+ mMediaSaveService.setListener(null);
+ }
+ if (mConnection != null) {
+ unbindService(mConnection);
+ }
}
- public void showSwitcher() {
- if (mCurrentModule.needsSwitcher()) {
- mSwitcher.setVisibility(View.VISIBLE);
+ @Override
+ public void onCreate(Bundle state) {
+ super.onCreate(state);
+ setContentView(R.layout.camera_filmstrip);
+ if (ApiHelper.HAS_ROTATION_ANIMATION) {
+ setRotationAnimation();
+ }
+ // Check if this is in the secure camera mode.
+ Intent intent = getIntent();
+ String action = intent.getAction();
+ if (INTENT_ACTION_STILL_IMAGE_CAMERA_SECURE.equals(action)
+ || ACTION_IMAGE_CAPTURE_SECURE.equals(action)) {
+ mSecureCamera = true;
+ } else {
+ mSecureCamera = intent.getBooleanExtra(SECURE_CAMERA_EXTRA, false);
+ }
+
+ if (mSecureCamera) {
+ // Change the window flags so that secure camera can show when locked
+ Window win = getWindow();
+ WindowManager.LayoutParams params = win.getAttributes();
+ params.flags |= WindowManager.LayoutParams.FLAG_SHOW_WHEN_LOCKED;
+ win.setAttributes(params);
+
+ // Filter for screen off so that we can finish secure camera activity
+ // when screen is off.
+ IntentFilter filter = new IntentFilter(Intent.ACTION_SCREEN_OFF);
+ registerReceiver(mScreenOffReceiver, filter);
}
+ LayoutInflater inflater = getLayoutInflater();
+ View rootLayout = inflater.inflate(R.layout.camera, null, false);
+ mRootView = rootLayout.findViewById(R.id.camera_app_root);
+ mDataAdapter = new CameraDataAdapter(
+ new ColorDrawable(getResources().getColor(R.color.photo_placeholder)));
+ mFilmStripView = (FilmStripView) findViewById(R.id.filmstrip_view);
+ mFilmStripView.setViewGap(
+ getResources().getDimensionPixelSize(R.dimen.camera_film_strip_gap));
+ // Set up the camera preview first so the preview shows up ASAP.
+ mDataAdapter.setCameraPreviewInfo(rootLayout,
+ FilmStripView.ImageData.SIZE_FULL, FilmStripView.ImageData.SIZE_FULL);
+ mFilmStripView.setDataAdapter(mDataAdapter);
+ mFilmStripView.setListener(mFilmStripListener);
+ mCurrentModule = new PhotoModule();
+ mCurrentModule.init(this, mRootView);
+ mOrientationListener = new MyOrientationEventListener(this);
+ mMainHandler = new Handler(getMainLooper());
+ bindMediaSaveService();
}
- public boolean isInCameraApp() {
- return mShowCameraAppView;
+ private void setRotationAnimation() {
+ int rotationAnimation = WindowManager.LayoutParams.ROTATION_ANIMATION_ROTATE;
+ rotationAnimation = WindowManager.LayoutParams.ROTATION_ANIMATION_CROSSFADE;
+ Window win = getWindow();
+ WindowManager.LayoutParams winParams = win.getAttributes();
+ winParams.rotationAnimation = rotationAnimation;
+ win.setAttributes(winParams);
}
@Override
- public void onConfigurationChanged(Configuration config) {
- super.onConfigurationChanged(config);
- mCurrentModule.onConfigurationChanged(config);
+ public void onUserInteraction() {
+ super.onUserInteraction();
+ mCurrentModule.onUserInteraction();
}
@Override
public void onPause() {
- mPaused = true;
mOrientationListener.disable();
mCurrentModule.onPauseBeforeSuper();
super.onPause();
@@ -328,7 +268,6 @@ public class CameraActivity extends ActivityBase
@Override
public void onResume() {
- mPaused = false;
if (Settings.System.getInt(getContentResolver(),
Settings.System.ACCELEROMETER_ROTATION, 0) == 0) {// auto-rotate off
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
@@ -341,180 +280,186 @@ public class CameraActivity extends ActivityBase
mCurrentModule.onResumeBeforeSuper();
super.onResume();
mCurrentModule.onResumeAfterSuper();
- }
- private void bindMediaSaveService() {
- Intent intent = new Intent(this, MediaSaveService.class);
- bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
- }
-
- private void unbindMediaSaveService() {
- if (mMediaSaveService != null) {
- mMediaSaveService.setListener(null);
- }
- if (mConnection != null) {
- unbindService(mConnection);
- }
- }
-
- @Override
- protected void onFullScreenChanged(boolean full) {
- if (full) {
- showUI();
+ // The loading is done in the background and will update the filmstrip later.
+ if (!mSecureCamera) {
+ mDataAdapter.requestLoad(getContentResolver());
} else {
- hideUI();
+ // Flush out all the original data first.
+ mDataAdapter.flush();
+ ImageView v = (ImageView) getLayoutInflater().inflate(
+ R.layout.secure_album_placeholder, null);
+ // Put a lock placeholder as the last image by setting its date to 0.
+ mDataAdapter.addLocalData(
+ new LocalData.LocalViewData(
+ v,
+ v.getDrawable().getIntrinsicWidth(),
+ v.getDrawable().getIntrinsicHeight(),
+ 0, 0));
}
- super.onFullScreenChanged(full);
- if (ApiHelper.HAS_ROTATION_ANIMATION) {
- setRotationAnimation(full);
- }
- mCurrentModule.onFullScreenChanged(full);
- }
-
- private void setRotationAnimation(boolean fullscreen) {
- int rotationAnimation = WindowManager.LayoutParams.ROTATION_ANIMATION_ROTATE;
- if (fullscreen) {
- rotationAnimation = WindowManager.LayoutParams.ROTATION_ANIMATION_CROSSFADE;
- }
- Window win = getWindow();
- WindowManager.LayoutParams winParams = win.getAttributes();
- winParams.rotationAnimation = rotationAnimation;
- win.setAttributes(winParams);
+ setSwipingEnabled(true);
}
@Override
- protected void onStop() {
- super.onStop();
- mCurrentModule.onStop();
- getStateManager().clearTasks();
+ public void onDestroy() {
+ unbindMediaSaveService();
+ if (mSecureCamera) unregisterReceiver(mScreenOffReceiver);
+ super.onDestroy();
}
@Override
- protected void onNewIntent(Intent intent) {
- super.onNewIntent(intent);
- getStateManager().clearActivityResult();
+ public void onConfigurationChanged(Configuration config) {
+ super.onConfigurationChanged(config);
+ mCurrentModule.onConfigurationChanged(config);
}
@Override
- protected void installIntentFilter() {
- super.installIntentFilter();
- mCurrentModule.installIntentFilter();
+ public boolean onKeyDown(int keyCode, KeyEvent event) {
+ if (mCurrentModule.onKeyDown(keyCode, event)) return true;
+ // Prevent software keyboard or voice search from showing up.
+ if (keyCode == KeyEvent.KEYCODE_SEARCH
+ || keyCode == KeyEvent.KEYCODE_MENU) {
+ if (event.isLongPress()) return true;
+ }
+ if (keyCode == KeyEvent.KEYCODE_MENU && mShowCameraPreview) {
+ return true;
+ }
+
+ return super.onKeyDown(keyCode, event);
}
@Override
- protected void onActivityResult(
- int requestCode, int resultCode, Intent data) {
- // Only PhotoPage understands ProxyLauncher.RESULT_USER_CANCELED
- if (resultCode == ProxyLauncher.RESULT_USER_CANCELED
- && !(getStateManager().getTopState() instanceof PhotoPage)) {
- resultCode = RESULT_CANCELED;
- }
- super.onActivityResult(requestCode, resultCode, data);
- // Unmap cancel vs. reset
- if (resultCode == ProxyLauncher.RESULT_USER_CANCELED) {
- resultCode = RESULT_CANCELED;
+ public boolean onKeyUp(int keyCode, KeyEvent event) {
+ if (mCurrentModule.onKeyUp(keyCode, event)) return true;
+ if (keyCode == KeyEvent.KEYCODE_MENU && mShowCameraPreview) {
+ return true;
}
- mCurrentModule.onActivityResult(requestCode, resultCode, data);
+ return super.onKeyUp(keyCode, event);
}
- // Preview area is touched. Handle touch focus.
- // Touch to focus is handled by PreviewGestures, this function call
- // is no longer needed. TODO: Clean it up in the next refactor
@Override
- protected void onSingleTapUp(View view, int x, int y) {
+ public boolean dispatchTouchEvent(MotionEvent m) {
+ return mFilmStripView.dispatchTouchEvent(m);
+ }
+ public boolean isAutoRotateScreen() {
+ return mAutoRotateScreen;
}
- @Override
- public void onBackPressed() {
- if (!mCurrentModule.onBackPressed()) {
- super.onBackPressed();
- }
+ protected void updateStorageSpace() {
+ mStorageSpace = Storage.getAvailableSpace();
}
- @Override
- public boolean onKeyDown(int keyCode, KeyEvent event) {
- return mCurrentModule.onKeyDown(keyCode, event)
- || super.onKeyDown(keyCode, event);
+ protected long getStorageSpace() {
+ return mStorageSpace;
}
- @Override
- public boolean onKeyUp(int keyCode, KeyEvent event) {
- return mCurrentModule.onKeyUp(keyCode, event)
- || super.onKeyUp(keyCode, event);
+ protected void updateStorageSpaceAndHint() {
+ updateStorageSpace();
+ updateStorageHint(mStorageSpace);
}
- public void cancelActivityTouchHandling() {
- if (mDown != null) {
- MotionEvent cancel = MotionEvent.obtain(mDown);
- cancel.setAction(MotionEvent.ACTION_CANCEL);
- super.dispatchTouchEvent(cancel);
- }
+ protected void updateStorageHint() {
+ updateStorageHint(mStorageSpace);
}
- @Override
- public boolean dispatchTouchEvent(MotionEvent m) {
- if (m.getActionMasked() == MotionEvent.ACTION_DOWN) {
- mDown = m;
+ protected boolean updateStorageHintOnResume() {
+ return true;
+ }
+
+ protected void updateStorageHint(long storageSpace) {
+ String message = null;
+ if (storageSpace == Storage.UNAVAILABLE) {
+ message = getString(R.string.no_storage);
+ } else if (storageSpace == Storage.PREPARING) {
+ message = getString(R.string.preparing_sd);
+ } else if (storageSpace == Storage.UNKNOWN_SIZE) {
+ message = getString(R.string.access_sd_fail);
+ } else if (storageSpace <= Storage.LOW_STORAGE_THRESHOLD) {
+ message = getString(R.string.spaceIsLow_content);
}
- if ((mSwitcher != null) && mSwitcher.showsPopup() && !mSwitcher.isInsidePopup(m)) {
- return mSwitcher.onTouch(null, m);
- } else if ((mSwitcher != null) && mSwitcher.isInsidePopup(m)) {
- return superDispatchTouchEvent(m);
- } else {
- return mCurrentModule.dispatchTouchEvent(m);
+
+ if (message != null) {
+ if (mStorageHint == null) {
+ mStorageHint = OnScreenHint.makeText(this, message);
+ } else {
+ mStorageHint.setText(message);
+ }
+ mStorageHint.show();
+ } else if (mStorageHint != null) {
+ mStorageHint.cancel();
+ mStorageHint = null;
}
}
- @Override
- public void startActivityForResult(Intent intent, int requestCode) {
- Intent proxyIntent = new Intent(this, ProxyLauncher.class);
- proxyIntent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_WHEN_TASK_RESET);
- proxyIntent.putExtra(Intent.EXTRA_INTENT, intent);
- super.startActivityForResult(proxyIntent, requestCode);
+ protected void setResultEx(int resultCode) {
+ mResultCodeForTesting = resultCode;
+ setResult(resultCode);
}
- public boolean superDispatchTouchEvent(MotionEvent m) {
- return super.dispatchTouchEvent(m);
+ protected void setResultEx(int resultCode, Intent data) {
+ mResultCodeForTesting = resultCode;
+ mResultDataForTesting = data;
+ setResult(resultCode, data);
}
- // Preview texture has been copied. Now camera can be released and the
- // animation can be started.
- @Override
- public void onPreviewTextureCopied() {
- mCurrentModule.onPreviewTextureCopied();
+ public int getResultCode() {
+ return mResultCodeForTesting;
}
- @Override
- public void onCaptureTextureCopied() {
- mCurrentModule.onCaptureTextureCopied();
+ public Intent getResultData() {
+ return mResultDataForTesting;
}
- @Override
- public void onUserInteraction() {
- super.onUserInteraction();
- mCurrentModule.onUserInteraction();
+ public boolean isSecureCamera() {
+ return mSecureCamera;
}
@Override
- protected boolean updateStorageHintOnResume() {
- return mCurrentModule.updateStorageHintOnResume();
+ public void onCameraSelected(int i) {
+ if (mCurrentModuleIndex == i) return;
+
+ CameraHolder.instance().keep();
+ closeModule(mCurrentModule);
+ mCurrentModuleIndex = i;
+ switch (i) {
+ case VIDEO_MODULE_INDEX:
+ mCurrentModule = new VideoModule();
+ break;
+ case PHOTO_MODULE_INDEX:
+ mCurrentModule = new PhotoModule();
+ break;
+ case LIGHTCYCLE_MODULE_INDEX:
+ mCurrentModule = LightCycleHelper.createPanoramaModule();
+ break;
+ default:
+ break;
+ }
+
+ openModule(mCurrentModule);
+ mCurrentModule.onOrientationChanged(mLastRawOrientation);
+ if (mMediaSaveService != null) {
+ mCurrentModule.onMediaSaveServiceConnected(mMediaSaveService);
+ }
}
- @Override
- public void updateCameraAppView() {
- super.updateCameraAppView();
- mCurrentModule.updateCameraAppView();
+ private void openModule(CameraModule module) {
+ module.init(this, mRootView);
+ module.onResumeBeforeSuper();
+ module.onResumeAfterSuper();
}
- private boolean canReuseScreenNail() {
- return mCurrentModuleIndex == PHOTO_MODULE_INDEX
- || mCurrentModuleIndex == VIDEO_MODULE_INDEX
- || mCurrentModuleIndex == LIGHTCYCLE_MODULE_INDEX;
+ private void closeModule(CameraModule module) {
+ module.onPauseBeforeSuper();
+ module.onPauseAfterSuper();
+ ((ViewGroup) mRootView).removeAllViews();
}
@Override
- public boolean isPanoramaActivity() {
- return (mCurrentModuleIndex == PANORAMA_MODULE_INDEX);
+ public void onShowSwitcherPopup() {
+ }
+
+ public void setSwipingEnabled(boolean enable) {
+ mDataAdapter.setCameraPreviewLock(!enable);
}
// Accessor methods for getting latency times used in performance testing
@@ -552,12 +497,4 @@ public class CameraActivity extends ActivityBase
return (mCurrentModule instanceof VideoModule) ?
((VideoModule) mCurrentModule).isRecording() : false;
}
-
- public CameraScreenNail getCameraScreenNail() {
- return (CameraScreenNail) mCameraScreenNail;
- }
-
- public MediaSaveService getMediaSaveService() {
- return mMediaSaveService;
- }
}
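
The filmstrip listener above hides a promoted or demoted item right away but defers the actual deletion by three seconds, restarting the timer on every new removal. A minimal sketch of that debounce pattern (illustrative only; PendingDeleter and reallyDelete() are hypothetical names, not part of this change):

    import android.os.Handler;
    import android.os.Looper;

    import java.util.ArrayList;
    import java.util.List;

    public class PendingDeleter {
        private static final long DELETION_DELAY_MS = 3000; // same 3s window as removeData()

        private final Handler mHandler = new Handler(Looper.getMainLooper());
        private final List<Integer> mPendingIds = new ArrayList<Integer>();
        private final Runnable mDeletionRunnable = new Runnable() {
            @Override
            public void run() {
                // Commit everything queued during the grace period.
                for (Integer id : mPendingIds) {
                    reallyDelete(id);
                }
                mPendingIds.clear();
            }
        };

        // Mirrors CameraActivity.removeData(): hide the item immediately, delete later.
        public void remove(int dataId) {
            mPendingIds.add(dataId);
            mHandler.removeCallbacks(mDeletionRunnable); // restart the grace period
            mHandler.postDelayed(mDeletionRunnable, DELETION_DELAY_MS);
        }

        private void reallyDelete(int dataId) {
            // Placeholder for the adapter's executeDeletion() call.
        }
    }
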
diff --git a/src/com/android/camera/CameraModule.java b/src/com/android/camera/CameraModule.java
index 3275d5f51..bcfe98d65 100644
--- a/src/com/android/camera/CameraModule.java
+++ b/src/com/android/camera/CameraModule.java
@@ -24,9 +24,9 @@ import android.view.View;
public interface CameraModule {
- public void init(CameraActivity activity, View frame, boolean reuseScreenNail);
+ public void init(CameraActivity activity, View frame);
- public void onFullScreenChanged(boolean full);
+ public void onSwitchMode(boolean toCamera);
public void onPauseBeforeSuper();
@@ -52,8 +52,6 @@ public interface CameraModule {
public void onSingleTapUp(View view, int x, int y);
- public boolean dispatchTouchEvent(MotionEvent m);
-
public void onPreviewTextureCopied();
public void onCaptureTextureCopied();
@@ -64,10 +62,6 @@ public interface CameraModule {
public void updateCameraAppView();
- public boolean needsSwitcher();
-
- public boolean needsPieMenu();
-
public void onOrientationChanged(int orientation);
public void onShowSwitcherPopup();
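
With the reuseScreenNail flag and the switcher/touch hooks gone, the interface now boils down to init(activity, root) plus the paired pause/resume callbacks. A sketch of the switch sequence CameraActivity drives through it (the ModuleSwitchSketch wrapper is hypothetical; the call order mirrors closeModule()/openModule() above):

    import android.view.ViewGroup;

    public class ModuleSwitchSketch {
        // Tear down the old module, clear the shared root view, then init and
        // resume the new module. There is no reuseScreenNail flag any more.
        static void switchTo(CameraActivity activity, ViewGroup root,
                CameraModule oldModule, CameraModule newModule) {
            oldModule.onPauseBeforeSuper();
            oldModule.onPauseAfterSuper();
            root.removeAllViews();

            newModule.init(activity, root);
            newModule.onResumeBeforeSuper();
            newModule.onResumeAfterSuper();
        }
    }
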
diff --git a/src/com/android/camera/Mosaic.java b/src/com/android/camera/Mosaic.java
deleted file mode 100644
index 78876c384..000000000
--- a/src/com/android/camera/Mosaic.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.camera;
-
-/**
- * The Java interface to JNI calls regarding mosaic stitching.
- *
- * A high-level usage is:
- *
- * Mosaic mosaic = new Mosaic();
- * mosaic.setSourceImageDimensions(width, height);
- * mosaic.reset(blendType);
- *
- * while ((pixels = hasNextImage()) != null) {
- * mosaic.setSourceImage(pixels);
- * }
- *
- * mosaic.createMosaic(highRes);
- * byte[] result = mosaic.getFinalMosaic();
- *
- */
-public class Mosaic {
- /**
- * In this mode, the images are stitched together in the same spatial arrangement as acquired,
- * i.e. if the user follows a curvy trajectory, the image boundary of the resulting mosaic will
- * be curved in the same manner. This mode is useful if the user wants to capture a mosaic as
- * if "painting" the scene with the smartphone and does not want any corrective warps
- * to distort the captured images.
- */
- public static final int BLENDTYPE_FULL = 0;
-
- /**
- * This mode is the same as BLENDTYPE_FULL except that the resulting mosaic is rotated
- * so that the first and last images sit at approximately the same vertical offset in the
- * output mosaic. This is useful when acquiring a mosaic by a typical panning-like motion to
- * remove a one-sided curve in the mosaic (typically due to the camera not staying horizontal
- * during the video capture) and convert it to a more symmetrical "smiley-face" like output.
- */
- public static final int BLENDTYPE_PAN = 1;
-
- /**
- * This mode compensates for typical "smiley-face" like output in longer mosaics and creates
- * a rectangular mosaic with minimal black borders (by unwrapping the mosaic onto an imaginary
- * cylinder). If the user follows a curved trajectory (instead of a perfect panning trajectory),
- * the resulting mosaic here may suffer from some image distortions in trying to map the
- * trajectory to a cylinder.
- */
- public static final int BLENDTYPE_CYLINDERPAN = 2;
-
- /**
- * This mode is basically BLENDTYPE_CYLINDERPAN plus a rectangular crop before returning
- * the mosaic. This mode is useful for making the resulting mosaic have a rectangular shape.
- */
- public static final int BLENDTYPE_HORIZONTAL = 3;
-
- /**
- * This strip type will use the default thin strips where the strips are
- * spaced according to the image capture rate.
- */
- public static final int STRIPTYPE_THIN = 0;
-
- /**
- * This strip type will use wider strips for blending. The strip separation
- * is controlled by a threshold on the native side. Since the strips are
- * wider, there is an additional cross-fade blending step to make the seam
- * boundaries smoother. Since this mode uses fewer image frames, it is
- * computationally more efficient than the thin strip mode.
- */
- public static final int STRIPTYPE_WIDE = 1;
-
- /**
- * Return codes from createMosaic() are one of the following.
- */
- public static final int MOSAIC_RET_OK = 1;
- public static final int MOSAIC_RET_ERROR = -1;
- public static final int MOSAIC_RET_CANCELLED = -2;
- public static final int MOSAIC_RET_LOW_TEXTURE = -3;
- public static final int MOSAIC_RET_FEW_INLIERS = 2;
-
-
- static {
- System.loadLibrary("jni_mosaic");
- }
-
- /**
- * Allocate memory for the image frames at the given resolution.
- *
- * @param width width of the input frames in pixels
- * @param height height of the input frames in pixels
- */
- public native void allocateMosaicMemory(int width, int height);
-
- /**
- * Free memory allocated by allocateMosaicMemory.
- *
- */
- public native void freeMosaicMemory();
-
- * Pass the input image frame to the native layer. Each time a new
- * Pass the input image frame to the native layer. Each time the a new
- * source image t is set, the transformation matrix from the first source
- * image to t is computed and returned.
- *
- * @param pixels source image of NV21 format.
- * @return Float array of length 11; first 9 entries correspond to the 3x3
- * transformation matrix between the first frame and the passed frame;
- * the 10th entry is the number of the passed frame, where the counting
- * starts from 1; and the 11th entry is the return code, whose value
- * is one of the MOSAIC_RET_* return flags defined above.
- */
- public native float[] setSourceImage(byte[] pixels);
-
- /**
- * This is an alternative to the setSourceImage function above. This should
- * be called when the image data is already on the native side in a fixed
- * byte array. In implementation, this array is filled by the GL thread
- * using glReadPixels directly from GPU memory (where it is accessed by
- * an associated SurfaceTexture).
- *
- * @return Float array of length 11; first 9 entries correspond to the 3x3
- * transformation matrix between the first frame and the passed frame;
- * the 10th entry is the number of the passed frame, where the counting
- * starts from 1; and the 11th entry is the return code, whose value
- * is one of the MOSAIC_RET_* return flags defined above.
- */
- public native float[] setSourceImageFromGPU();
-
- /**
- * Set the type of blending.
- *
- * @param type the blending type defined in the class. {BLENDTYPE_FULL,
- * BLENDTYPE_PAN, BLENDTYPE_CYLINDERPAN, BLENDTYPE_HORIZONTAL}
- */
- public native void setBlendingType(int type);
-
- /**
- * Set the type of strips to use for blending.
- * @param type the blending strip type to use {STRIPTYPE_THIN,
- * STRIPTYPE_WIDE}.
- */
- public native void setStripType(int type);
-
- /**
- * Tell the native layer to create the final mosaic after all the input frame
- * data have been collected.
- * Generating a high-resolution mosaic may take dozens of seconds to finish.
- *
- * @param value True means generating a high-resolution mosaic -
- * which is based on the original images set in setSourceImage().
- * False means generating a low-resolution version -
- * which is based on 1/4 downscaled images from the original images.
- * @return Returns a status code suggesting if the mosaic building was
- * successful, in error, or was cancelled by the user.
- */
- public native int createMosaic(boolean value);
-
- /**
- * Get the data for the created mosaic.
- *
- * @return Returns an integer array which contains the final mosaic in the ARGB_8888 format.
- * The first MosaicWidth*MosaicHeight values contain the image data, followed by 2
- * integers corresponding to the values MosaicWidth and MosaicHeight respectively.
- */
- public native int[] getFinalMosaic();
-
- /**
- * Get the data for the created mosaic.
- *
- * @return Returns a byte array which contains the final mosaic in the NV21 format.
- * The first MosaicWidth*MosaicHeight*1.5 values contain the image data, followed by
- * 8 bytes which pack the MosaicWidth and MosaicHeight integers into 4 bytes
- * each.
- */
- public native byte[] getFinalMosaicNV21();
-
- /**
- * Reset the state of the frame arrays which maintain the captured frame data.
- * Also re-initializes the native mosaic object to make it ready for capturing a new mosaic.
- */
- public native void reset();
-
- /**
- * Get the progress status of the mosaic computation process.
- * @param hires Boolean flag to select whether to report progress of the
- * low-res or high-res mosaicer.
- * @param cancelComputation Boolean flag to allow cancelling the
- * mosaic computation when needed from the GUI end.
- * @return Returns a number from 0-100 where 50 denotes that the mosaic
- * computation is 50% done.
- */
- public native int reportProgress(boolean hires, boolean cancelComputation);
-}
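
The javadoc at the top of this deleted class sketches the stitching flow; reconstructed against the methods actually declared here (stitch() and the FrameSource supplier are hypothetical), the sequence is roughly:

    public class MosaicUsageSketch {
        // Hypothetical supplier of NV21 preview frames.
        interface FrameSource {
            byte[] nextFrame(); // null when there are no more frames
        }

        public static byte[] stitch(FrameSource frames, int width, int height) {
            Mosaic mosaic = new Mosaic();
            mosaic.allocateMosaicMemory(width, height);
            mosaic.setBlendingType(Mosaic.BLENDTYPE_CYLINDERPAN);
            mosaic.setStripType(Mosaic.STRIPTYPE_WIDE);
            mosaic.reset();

            byte[] nv21;
            while ((nv21 = frames.nextFrame()) != null) {
                // Entry 10 of the returned array is the MOSAIC_RET_* code for this frame.
                float[] result = mosaic.setSourceImage(nv21);
                if ((int) result[10] == Mosaic.MOSAIC_RET_ERROR) {
                    break;
                }
            }

            // true requests the high-resolution mosaic, which may take tens of seconds.
            int ret = mosaic.createMosaic(true);
            byte[] out = (ret == Mosaic.MOSAIC_RET_OK) ? mosaic.getFinalMosaicNV21() : null;
            mosaic.freeMosaicMemory();
            return out;
        }
    }
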
diff --git a/src/com/android/camera/MosaicFrameProcessor.java b/src/com/android/camera/MosaicFrameProcessor.java
deleted file mode 100644
index efd4ad2ae..000000000
--- a/src/com/android/camera/MosaicFrameProcessor.java
+++ /dev/null
@@ -1,237 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.camera;
-
-import android.util.Log;
-
-/**
- * Class to handle the processing of each frame by Mosaicer.
- */
-public class MosaicFrameProcessor {
- private static final String TAG = "MosaicFrameProcessor";
- private static final int NUM_FRAMES_IN_BUFFER = 2;
- private static final int MAX_NUMBER_OF_FRAMES = 100;
- private static final int MOSAIC_RET_CODE_INDEX = 10;
- private static final int FRAME_COUNT_INDEX = 9;
- private static final int X_COORD_INDEX = 2;
- private static final int Y_COORD_INDEX = 5;
- private static final int HR_TO_LR_DOWNSAMPLE_FACTOR = 4;
- private static final int WINDOW_SIZE = 3;
-
- private Mosaic mMosaicer;
- private boolean mIsMosaicMemoryAllocated = false;
- private float mTranslationLastX;
- private float mTranslationLastY;
-
- private int mFillIn = 0;
- private int mTotalFrameCount = 0;
- private int mLastProcessFrameIdx = -1;
- private int mCurrProcessFrameIdx = -1;
- private boolean mFirstRun;
-
- // The panning rate is in units of percentage of image content translation per
- // frame. Use a moving average to calculate the panning rate.
- private float mPanningRateX;
- private float mPanningRateY;
-
- private float[] mDeltaX = new float[WINDOW_SIZE];
- private float[] mDeltaY = new float[WINDOW_SIZE];
- private int mOldestIdx = 0;
- private float mTotalTranslationX = 0f;
- private float mTotalTranslationY = 0f;
-
- private ProgressListener mProgressListener;
-
- private int mPreviewWidth;
- private int mPreviewHeight;
- private int mPreviewBufferSize;
-
- private static MosaicFrameProcessor sMosaicFrameProcessor; // singleton
-
- public interface ProgressListener {
- public void onProgress(boolean isFinished, float panningRateX, float panningRateY,
- float progressX, float progressY);
- }
-
- public static MosaicFrameProcessor getInstance() {
- if (sMosaicFrameProcessor == null) {
- sMosaicFrameProcessor = new MosaicFrameProcessor();
- }
- return sMosaicFrameProcessor;
- }
-
- private MosaicFrameProcessor() {
- mMosaicer = new Mosaic();
- }
-
- public void setProgressListener(ProgressListener listener) {
- mProgressListener = listener;
- }
-
- public int reportProgress(boolean hires, boolean cancel) {
- return mMosaicer.reportProgress(hires, cancel);
- }
-
- public void initialize(int previewWidth, int previewHeight, int bufSize) {
- mPreviewWidth = previewWidth;
- mPreviewHeight = previewHeight;
- mPreviewBufferSize = bufSize;
- setupMosaicer(mPreviewWidth, mPreviewHeight, mPreviewBufferSize);
- setStripType(Mosaic.STRIPTYPE_WIDE);
- // no need to call reset() here. reset() should be called by the client
- // after this initialization before calling other methods of this object.
- }
-
- public void clear() {
- if (mIsMosaicMemoryAllocated) {
- mMosaicer.freeMosaicMemory();
- mIsMosaicMemoryAllocated = false;
- }
- synchronized (this) {
- notify();
- }
- }
-
- public boolean isMosaicMemoryAllocated() {
- return mIsMosaicMemoryAllocated;
- }
-
- public void setStripType(int type) {
- mMosaicer.setStripType(type);
- }
-
- private void setupMosaicer(int previewWidth, int previewHeight, int bufSize) {
- Log.v(TAG, "setupMosaicer w, h=" + previewWidth + ',' + previewHeight + ',' + bufSize);
-
- if (mIsMosaicMemoryAllocated) throw new RuntimeException("MosaicFrameProcessor in use!");
- mIsMosaicMemoryAllocated = true;
- mMosaicer.allocateMosaicMemory(previewWidth, previewHeight);
- }
-
- public void reset() {
- // reset() can be called even if MosaicFrameProcessor is not initialized.
- // Only counters will be changed.
- mFirstRun = true;
- mTotalFrameCount = 0;
- mFillIn = 0;
- mTotalTranslationX = 0;
- mTranslationLastX = 0;
- mTotalTranslationY = 0;
- mTranslationLastY = 0;
- mPanningRateX = 0;
- mPanningRateY = 0;
- mLastProcessFrameIdx = -1;
- mCurrProcessFrameIdx = -1;
- for (int i = 0; i < WINDOW_SIZE; ++i) {
- mDeltaX[i] = 0f;
- mDeltaY[i] = 0f;
- }
- mMosaicer.reset();
- }
-
- public int createMosaic(boolean highRes) {
- return mMosaicer.createMosaic(highRes);
- }
-
- public byte[] getFinalMosaicNV21() {
- return mMosaicer.getFinalMosaicNV21();
- }
-
- // Processes the last filled image frame through the mosaicer and
- // updates the UI to show progress.
- // When done, processes and displays the final mosaic.
- public void processFrame() {
- if (!mIsMosaicMemoryAllocated) {
- // clear() has been called and the buffers are cleared, so stop the computation.
- // This can happen when onPause() is called in the activity while some frames
- // have not been processed yet and thus the callback may still be invoked.
- return;
- }
-
- mCurrProcessFrameIdx = mFillIn;
- mFillIn = ((mFillIn + 1) % NUM_FRAMES_IN_BUFFER);
-
- // Check that we are trying to process a frame different from the
- // last one processed (useful if this class was running asynchronously)
- if (mCurrProcessFrameIdx != mLastProcessFrameIdx) {
- mLastProcessFrameIdx = mCurrProcessFrameIdx;
-
- // TODO: make the termination condition regarding reaching
- // MAX_NUMBER_OF_FRAMES solely determined in the library.
- if (mTotalFrameCount < MAX_NUMBER_OF_FRAMES) {
- // If we are still collecting new frames for the current mosaic,
- // process the new frame.
- calculateTranslationRate();
-
- // Publish progress of the ongoing processing
- if (mProgressListener != null) {
- mProgressListener.onProgress(false, mPanningRateX, mPanningRateY,
- mTranslationLastX * HR_TO_LR_DOWNSAMPLE_FACTOR / mPreviewWidth,
- mTranslationLastY * HR_TO_LR_DOWNSAMPLE_FACTOR / mPreviewHeight);
- }
- } else {
- if (mProgressListener != null) {
- mProgressListener.onProgress(true, mPanningRateX, mPanningRateY,
- mTranslationLastX * HR_TO_LR_DOWNSAMPLE_FACTOR / mPreviewWidth,
- mTranslationLastY * HR_TO_LR_DOWNSAMPLE_FACTOR / mPreviewHeight);
- }
- }
- }
- }
-
- public void calculateTranslationRate() {
- float[] frameData = mMosaicer.setSourceImageFromGPU();
- int ret_code = (int) frameData[MOSAIC_RET_CODE_INDEX];
- mTotalFrameCount = (int) frameData[FRAME_COUNT_INDEX];
- float translationCurrX = frameData[X_COORD_INDEX];
- float translationCurrY = frameData[Y_COORD_INDEX];
-
- if (mFirstRun) {
- // First time: no need to update delta values.
- mTranslationLastX = translationCurrX;
- mTranslationLastY = translationCurrY;
- mFirstRun = false;
- return;
- }
-
- // Moving average: remove the oldest translation/deltaTime and
- // add the newest translation/deltaTime in
- int idx = mOldestIdx;
- mTotalTranslationX -= mDeltaX[idx];
- mTotalTranslationY -= mDeltaY[idx];
- mDeltaX[idx] = Math.abs(translationCurrX - mTranslationLastX);
- mDeltaY[idx] = Math.abs(translationCurrY - mTranslationLastY);
- mTotalTranslationX += mDeltaX[idx];
- mTotalTranslationY += mDeltaY[idx];
-
- // The panning rate is measured as the rate of the translation percentage in
- // image width/height. Taking the horizontal panning rate as an example, the image width
- // used in finding the translation is (PreviewWidth / HR_TO_LR_DOWNSAMPLE_FACTOR).
- // To get the horizontal translation percentage, the horizontal translation,
- // (translationCurrX - mTranslationLastX), is divided by that
- // image width. We then get the rate by dividing the translation percentage by the
- // number of frames.
- mPanningRateX = mTotalTranslationX /
- (mPreviewWidth / HR_TO_LR_DOWNSAMPLE_FACTOR) / WINDOW_SIZE;
- mPanningRateY = mTotalTranslationY /
- (mPreviewHeight / HR_TO_LR_DOWNSAMPLE_FACTOR) / WINDOW_SIZE;
-
- mTranslationLastX = translationCurrX;
- mTranslationLastY = translationCurrY;
- mOldestIdx = (mOldestIdx + 1) % WINDOW_SIZE;
- }
-}
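
calculateTranslationRate() above keeps a WINDOW_SIZE moving average of per-frame translation deltas and normalizes by the low-res image width. Reduced to one axis (the class and update() signature here are hypothetical), the arithmetic looks like:

    public class PanningRateSketch {
        private static final int WINDOW_SIZE = 3;
        private static final int HR_TO_LR_DOWNSAMPLE_FACTOR = 4;

        private final float[] mDeltaX = new float[WINDOW_SIZE];
        private float mTotalTranslationX;
        private float mLastX;
        private int mOldestIdx;
        private boolean mFirstRun = true;

        // Returns the panning rate as a fraction of the (downsampled) image width per frame.
        public float update(float translationCurrX, int previewWidth) {
            if (mFirstRun) {
                // First frame: just remember the translation, no delta yet.
                mLastX = translationCurrX;
                mFirstRun = false;
                return 0f;
            }
            // Replace the oldest delta in the running total with the newest one.
            mTotalTranslationX -= mDeltaX[mOldestIdx];
            mDeltaX[mOldestIdx] = Math.abs(translationCurrX - mLastX);
            mTotalTranslationX += mDeltaX[mOldestIdx];
            mOldestIdx = (mOldestIdx + 1) % WINDOW_SIZE;
            mLastX = translationCurrX;

            // Translation is measured on the low-res image, so normalize by
            // previewWidth / HR_TO_LR_DOWNSAMPLE_FACTOR, then average over the window.
            return mTotalTranslationX / (previewWidth / HR_TO_LR_DOWNSAMPLE_FACTOR) / WINDOW_SIZE;
        }
    }
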
diff --git a/src/com/android/camera/MosaicPreviewRenderer.java b/src/com/android/camera/MosaicPreviewRenderer.java
deleted file mode 100644
index 26ce733aa..000000000
--- a/src/com/android/camera/MosaicPreviewRenderer.java
+++ /dev/null
@@ -1,265 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.camera;
-
-import android.annotation.TargetApi;
-import android.graphics.SurfaceTexture;
-import android.os.ConditionVariable;
-import android.os.Handler;
-import android.os.HandlerThread;
-import android.os.Looper;
-import android.os.Message;
-import android.util.Log;
-
-import com.android.gallery3d.common.ApiHelper;
-
-import javax.microedition.khronos.egl.EGL10;
-import javax.microedition.khronos.egl.EGLConfig;
-import javax.microedition.khronos.egl.EGLContext;
-import javax.microedition.khronos.egl.EGLDisplay;
-import javax.microedition.khronos.egl.EGLSurface;
-import javax.microedition.khronos.opengles.GL10;
-
-@TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB) // uses SurfaceTexture
-public class MosaicPreviewRenderer {
- private static final String TAG = "MosaicPreviewRenderer";
- private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098;
- private static final boolean DEBUG = false;
-
- private int mWidth; // width of the view in UI
- private int mHeight; // height of the view in UI
-
- private boolean mIsLandscape = true;
- private final float[] mTransformMatrix = new float[16];
-
- private ConditionVariable mEglThreadBlockVar = new ConditionVariable();
- private HandlerThread mEglThread;
- private EGLHandler mEglHandler;
-
- private EGLConfig mEglConfig;
- private EGLDisplay mEglDisplay;
- private EGLContext mEglContext;
- private EGLSurface mEglSurface;
- private SurfaceTexture mMosaicOutputSurfaceTexture;
- private SurfaceTexture mInputSurfaceTexture;
- private EGL10 mEgl;
- private GL10 mGl;
-
- private class EGLHandler extends Handler {
- public static final int MSG_INIT_EGL_SYNC = 0;
- public static final int MSG_SHOW_PREVIEW_FRAME_SYNC = 1;
- public static final int MSG_SHOW_PREVIEW_FRAME = 2;
- public static final int MSG_ALIGN_FRAME_SYNC = 3;
- public static final int MSG_RELEASE = 4;
-
- public EGLHandler(Looper looper) {
- super(looper);
- }
-
- @Override
- public void handleMessage(Message msg) {
- switch (msg.what) {
- case MSG_INIT_EGL_SYNC:
- doInitGL();
- mEglThreadBlockVar.open();
- break;
- case MSG_SHOW_PREVIEW_FRAME_SYNC:
- doShowPreviewFrame();
- mEglThreadBlockVar.open();
- break;
- case MSG_SHOW_PREVIEW_FRAME:
- doShowPreviewFrame();
- break;
- case MSG_ALIGN_FRAME_SYNC:
- doAlignFrame();
- mEglThreadBlockVar.open();
- break;
- case MSG_RELEASE:
- doRelease();
- mEglThreadBlockVar.open();
- break;
- }
- }
-
- private void doAlignFrame() {
- mInputSurfaceTexture.updateTexImage();
- mInputSurfaceTexture.getTransformMatrix(mTransformMatrix);
-
- MosaicRenderer.setWarping(true);
- // Call preprocess to render it to low-res and high-res RGB textures.
- MosaicRenderer.preprocess(mTransformMatrix);
- // Now, transfer the textures from GPU to CPU memory for processing
- MosaicRenderer.transferGPUtoCPU();
- MosaicRenderer.updateMatrix();
- draw();
- mEgl.eglSwapBuffers(mEglDisplay, mEglSurface);
- }
-
- private void doShowPreviewFrame() {
- mInputSurfaceTexture.updateTexImage();
- mInputSurfaceTexture.getTransformMatrix(mTransformMatrix);
-
- MosaicRenderer.setWarping(false);
- // Call preprocess to render it to low-res and high-res RGB textures.
- MosaicRenderer.preprocess(mTransformMatrix);
- MosaicRenderer.updateMatrix();
- draw();
- mEgl.eglSwapBuffers(mEglDisplay, mEglSurface);
- }
-
- private void doInitGL() {
- // These are copied from GLSurfaceView
- mEgl = (EGL10) EGLContext.getEGL();
- mEglDisplay = mEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
- if (mEglDisplay == EGL10.EGL_NO_DISPLAY) {
- throw new RuntimeException("eglGetDisplay failed");
- }
- int[] version = new int[2];
- if (!mEgl.eglInitialize(mEglDisplay, version)) {
- throw new RuntimeException("eglInitialize failed");
- } else {
- Log.v(TAG, "EGL version: " + version[0] + '.' + version[1]);
- }
- int[] attribList = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE };
- mEglConfig = chooseConfig(mEgl, mEglDisplay);
- mEglContext = mEgl.eglCreateContext(mEglDisplay, mEglConfig, EGL10.EGL_NO_CONTEXT,
- attribList);
-
- if (mEglContext == null || mEglContext == EGL10.EGL_NO_CONTEXT) {
- throw new RuntimeException("failed to createContext");
- }
- mEglSurface = mEgl.eglCreateWindowSurface(
- mEglDisplay, mEglConfig, mMosaicOutputSurfaceTexture, null);
- if (mEglSurface == null || mEglSurface == EGL10.EGL_NO_SURFACE) {
- throw new RuntimeException("failed to createWindowSurface");
- }
-
- if (!mEgl.eglMakeCurrent(mEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
- throw new RuntimeException("failed to eglMakeCurrent");
- }
-
- mGl = (GL10) mEglContext.getGL();
-
- mInputSurfaceTexture = new SurfaceTexture(MosaicRenderer.init());
- MosaicRenderer.reset(mWidth, mHeight, mIsLandscape);
- }
-
- private void doRelease() {
- mEgl.eglDestroySurface(mEglDisplay, mEglSurface);
- mEgl.eglDestroyContext(mEglDisplay, mEglContext);
- mEgl.eglMakeCurrent(mEglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE,
- EGL10.EGL_NO_CONTEXT);
- mEgl.eglTerminate(mEglDisplay);
- mEglSurface = null;
- mEglContext = null;
- mEglDisplay = null;
- releaseSurfaceTexture(mInputSurfaceTexture);
- mEglThread.quit();
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH)
- private void releaseSurfaceTexture(SurfaceTexture st) {
- if (ApiHelper.HAS_RELEASE_SURFACE_TEXTURE) {
- st.release();
- }
- }
-
- // Should be called from another thread.
- public void sendMessageSync(int msg) {
- mEglThreadBlockVar.close();
- sendEmptyMessage(msg);
- mEglThreadBlockVar.block();
- }
-
- }
-
- public MosaicPreviewRenderer(SurfaceTexture tex, int w, int h, boolean isLandscape) {
- mMosaicOutputSurfaceTexture = tex;
- mWidth = w;
- mHeight = h;
- mIsLandscape = isLandscape;
-
- mEglThread = new HandlerThread("PanoramaRealtimeRenderer");
- mEglThread.start();
- mEglHandler = new EGLHandler(mEglThread.getLooper());
-
- // We need to sync this because the generation of surface texture for input is
- // done here and the client will continue with the assumption that the
- // generation is completed.
- mEglHandler.sendMessageSync(EGLHandler.MSG_INIT_EGL_SYNC);
- }
-
- public void release() {
- mEglHandler.sendMessageSync(EGLHandler.MSG_RELEASE);
- }
-
- public void showPreviewFrameSync() {
- mEglHandler.sendMessageSync(EGLHandler.MSG_SHOW_PREVIEW_FRAME_SYNC);
- }
-
- public void showPreviewFrame() {
- mEglHandler.sendEmptyMessage(EGLHandler.MSG_SHOW_PREVIEW_FRAME);
- }
-
- public void alignFrameSync() {
- mEglHandler.sendMessageSync(EGLHandler.MSG_ALIGN_FRAME_SYNC);
- }
-
- public SurfaceTexture getInputSurfaceTexture() {
- return mInputSurfaceTexture;
- }
-
- private void draw() {
- MosaicRenderer.step();
- }
-
- private static void checkEglError(String prompt, EGL10 egl) {
- int error;
- while ((error = egl.eglGetError()) != EGL10.EGL_SUCCESS) {
- Log.e(TAG, String.format("%s: EGL error: 0x%x", prompt, error));
- }
- }
-
- private static final int EGL_OPENGL_ES2_BIT = 4;
- private static final int[] CONFIG_SPEC = new int[] {
- EGL10.EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
- EGL10.EGL_RED_SIZE, 8,
- EGL10.EGL_GREEN_SIZE, 8,
- EGL10.EGL_BLUE_SIZE, 8,
- EGL10.EGL_NONE
- };
-
- private static EGLConfig chooseConfig(EGL10 egl, EGLDisplay display) {
- int[] numConfig = new int[1];
- if (!egl.eglChooseConfig(display, CONFIG_SPEC, null, 0, numConfig)) {
- throw new IllegalArgumentException("eglChooseConfig failed");
- }
-
- int numConfigs = numConfig[0];
- if (numConfigs <= 0) {
- throw new IllegalArgumentException("No configs match configSpec");
- }
-
- EGLConfig[] configs = new EGLConfig[numConfigs];
- if (!egl.eglChooseConfig(
- display, CONFIG_SPEC, configs, numConfigs, numConfig)) {
- throw new IllegalArgumentException("eglChooseConfig#2 failed");
- }
-
- return configs[0];
- }
-}
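
EGLHandler.sendMessageSync() above pairs a HandlerThread with a ConditionVariable so a caller can post work to the EGL thread and block until it completes. The pattern on its own (all names here are hypothetical):

    import android.os.ConditionVariable;
    import android.os.Handler;
    import android.os.HandlerThread;
    import android.os.Message;

    public class SyncHandlerSketch {
        private static final int MSG_WORK = 0;

        private final ConditionVariable mBlock = new ConditionVariable();
        private final HandlerThread mThread = new HandlerThread("worker");
        private final Handler mHandler;

        public SyncHandlerSketch() {
            mThread.start();
            mHandler = new Handler(mThread.getLooper()) {
                @Override
                public void handleMessage(Message msg) {
                    if (msg.what == MSG_WORK) {
                        doWork();      // runs on the worker thread (the EGL thread here)
                        mBlock.open(); // wake up the blocked caller
                    }
                }
            };
        }

        // Called from another thread; returns only after doWork() has finished.
        public void runWorkSync() {
            mBlock.close();
            mHandler.sendEmptyMessage(MSG_WORK);
            mBlock.block();
        }

        private void doWork() {
            // Placeholder for the EGL call that must finish before the caller continues.
        }
    }
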
diff --git a/src/com/android/camera/MosaicRenderer.java b/src/com/android/camera/MosaicRenderer.java
deleted file mode 100644
index c50ca0d52..000000000
--- a/src/com/android/camera/MosaicRenderer.java
+++ /dev/null
@@ -1,89 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.camera;
-
-/**
- * The Java interface to JNI calls regarding mosaic preview rendering.
- *
- */
-public class MosaicRenderer
-{
- static
- {
- System.loadLibrary("jni_mosaic");
- }
-
- /**
- * Function to be called in onSurfaceCreated() to initialize
- * the GL context, load and link the shaders and create the
- * program. Returns a texture ID to be used for SurfaceTexture.
- *
- * @return textureID the texture ID of the newly generated texture to
- * be assigned to the SurfaceTexture object.
- */
- public static native int init();
-
- /**
- * Pass the drawing surface's width and height to initialize the
- * renderer viewports and FBO dimensions.
- *
- * @param width width of the drawing surface in pixels.
- * @param height height of the drawing surface in pixels.
- * @param isLandscapeOrientation whether the orientation of the activity layout is landscape.
- */
- public static native void reset(int width, int height, boolean isLandscapeOrientation);
-
- /**
- * Calling this function will render the SurfaceTexture to a new 2D texture
- * using the provided STMatrix.
- *
- * @param stMatrix texture coordinate transform matrix obtained from the
- * SurfaceTexture.
- */
- public static native void preprocess(float[] stMatrix);
-
- /**
- * This function calls glReadPixels to transfer both the low-res and high-res
- * data from the GPU memory to the CPU memory for further processing by the
- * mosaicing library.
- */
- public static native void transferGPUtoCPU();
-
- /**
- * Function to be called in onDrawFrame() to update the screen with
- * the new frame data.
- */
- public static native void step();
-
- /**
- * Call this function when a new low-res frame has been processed by
- * the mosaicing library. This will tell the renderer library to
- * update its texture and warping transformation. Any calls to step()
- * after this call will use the new image frame and transformation data.
- */
- public static native void updateMatrix();
-
- /**
- * This function allows toggling between showing the input image data
- * (without applying any warp) and the warped image data. For running
- * the renderer as a viewfinder, we set the flag to false. To see the
- * preview mosaic, we set the flag to true.
- *
- * @param flag boolean flag to set the warping to true or false.
- */
- public static native void setWarping(boolean flag);
-}
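
Read together with the MosaicPreviewRenderer diff above, the per-frame call order into this JNI class was: update the input SurfaceTexture, choose warping, preprocess, optionally transfer the frame to the CPU, update the matrix, then step and swap. A sketch of that flow (everything except the MosaicRenderer calls is hypothetical):

    import android.graphics.SurfaceTexture;

    public class MosaicFrameSketch {
        private final float[] mTransformMatrix = new float[16];

        // capture == true corresponds to doAlignFrame(); false to doShowPreviewFrame().
        void renderFrame(SurfaceTexture input, boolean capture) {
            input.updateTexImage();
            input.getTransformMatrix(mTransformMatrix);

            MosaicRenderer.setWarping(capture);          // warp only while building the mosaic
            MosaicRenderer.preprocess(mTransformMatrix); // render to low-res/high-res RGB textures
            if (capture) {
                MosaicRenderer.transferGPUtoCPU();       // hand the frame to the mosaicing library
            }
            MosaicRenderer.updateMatrix();
            MosaicRenderer.step();                       // draw to the current EGL surface
            swapBuffers();
        }

        private void swapBuffers() {
            // Placeholder for eglSwapBuffers(display, surface).
        }
    }
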
diff --git a/src/com/android/camera/NewCameraActivity.java b/src/com/android/camera/NewCameraActivity.java
deleted file mode 100644
index 59b366419..000000000
--- a/src/com/android/camera/NewCameraActivity.java
+++ /dev/null
@@ -1,433 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.camera;
-
-import android.app.Activity;
-import android.content.ComponentName;
-import android.content.ContentResolver;
-import android.content.Context;
-import android.content.Intent;
-import android.content.IntentFilter;
-import android.content.ServiceConnection;
-import android.content.pm.ActivityInfo;
-import android.content.res.Configuration;
-import android.graphics.drawable.ColorDrawable;
-import android.net.Uri;
-import android.os.Bundle;
-import android.os.IBinder;
-import android.provider.Settings;
-import android.view.LayoutInflater;
-import android.view.MotionEvent;
-import android.view.OrientationEventListener;
-import android.view.View;
-import android.view.ViewGroup;
-import android.view.Window;
-import android.view.WindowManager;
-import android.widget.ImageView;
-
-import com.android.camera.data.CameraDataAdapter;
-import com.android.camera.data.LocalData;
-import com.android.camera.ui.CameraSwitcher.CameraSwitchListener;
-import com.android.camera.ui.FilmStripView;
-import com.android.camera.ui.NewCameraRootView;
-import com.android.gallery3d.R;
-import com.android.gallery3d.common.ApiHelper;
-import com.android.gallery3d.util.LightCycleHelper;
-
-public class NewCameraActivity extends Activity
- implements CameraSwitchListener {
- public static final int PHOTO_MODULE_INDEX = 0;
- public static final int VIDEO_MODULE_INDEX = 1;
- public static final int PANORAMA_MODULE_INDEX = 2;
- public static final int LIGHTCYCLE_MODULE_INDEX = 3;
- private static final String INTENT_ACTION_STILL_IMAGE_CAMERA_SECURE =
- "android.media.action.STILL_IMAGE_CAMERA_SECURE";
- public static final String ACTION_IMAGE_CAPTURE_SECURE =
- "android.media.action.IMAGE_CAPTURE_SECURE";
- // The intent extra for camera from secure lock screen. True if the gallery
- // should only show newly captured pictures. sSecureAlbumId does not
- // increment. This is used when switching between camera, camcorder, and
- // panorama. If the extra is not set, it is in the normal camera mode.
- public static final String SECURE_CAMERA_EXTRA = "secure_camera";
-
- private static final String TAG = "CAM_Activity";
- private CameraDataAdapter mDataAdapter;
- private int mCurrentModuleIndex;
- private NewCameraModule mCurrentModule;
- private View mRootView;
- private FilmStripView mFilmStripView;
- private int mResultCodeForTesting;
- private Intent mResultDataForTesting;
- private OnScreenHint mStorageHint;
- private long mStorageSpace = Storage.LOW_STORAGE_THRESHOLD;
- private PhotoModule mController;
- private boolean mAutoRotateScreen;
- private boolean mSecureCamera;
- private int mLastRawOrientation;
- private MyOrientationEventListener mOrientationListener;
- private class MyOrientationEventListener
- extends OrientationEventListener {
- public MyOrientationEventListener(Context context) {
- super(context);
- }
-
- @Override
- public void onOrientationChanged(int orientation) {
- // We keep the last known orientation. So if the user first orients
- // the camera and then points it at the floor or the sky, we still have
- // the correct orientation.
- if (orientation == ORIENTATION_UNKNOWN) return;
- mLastRawOrientation = orientation;
- mCurrentModule.onOrientationChanged(orientation);
- }
- }
- private MediaSaveService mMediaSaveService;
- private ServiceConnection mConnection = new ServiceConnection() {
- @Override
- public void onServiceConnected(ComponentName className, IBinder b) {
- mMediaSaveService = ((MediaSaveService.LocalBinder) b).getService();
- mCurrentModule.onMediaSaveServiceConnected(mMediaSaveService);
- }
- @Override
- public void onServiceDisconnected(ComponentName className) {
- mMediaSaveService = null;
- }};
-
- public MediaSaveService getMediaSaveService() {
- return mMediaSaveService;
- }
-
- public void notifyNewMedia(Uri uri) {
- ContentResolver cr = getContentResolver();
- String mimeType = cr.getType(uri);
- if (mimeType.startsWith("video/")) {
- sendBroadcast(new Intent(Util.ACTION_NEW_VIDEO, uri));
- mDataAdapter.addNewVideo(cr, uri);
- } else if (mimeType.startsWith("image/")) {
- Util.broadcastNewPicture(this, uri);
- mDataAdapter.addNewPhoto(cr, uri);
- } else {
- android.util.Log.w(TAG, "Unknown new media with MIME type:"
- + mimeType + ", uri:" + uri);
- }
- }
-
- private void bindMediaSaveService() {
- Intent intent = new Intent(this, MediaSaveService.class);
- startService(intent); // start service before binding it so the
- // service won't be killed if we unbind it.
- bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
- }
-
- private void unbindMediaSaveService() {
- if (mMediaSaveService != null) {
- mMediaSaveService.setListener(null);
- }
- if (mConnection != null) {
- unbindService(mConnection);
- }
- }
-
- @Override
- public void onCreate(Bundle state) {
- super.onCreate(state);
- setContentView(R.layout.camera_filmstrip);
- if (ApiHelper.HAS_ROTATION_ANIMATION) {
- setRotationAnimation();
- }
- // Check if this is in the secure camera mode.
- Intent intent = getIntent();
- String action = intent.getAction();
- if (INTENT_ACTION_STILL_IMAGE_CAMERA_SECURE.equals(action)) {
- mSecureCamera = true;
- } else if (ACTION_IMAGE_CAPTURE_SECURE.equals(action)) {
- mSecureCamera = true;
- } else {
- mSecureCamera = intent.getBooleanExtra(SECURE_CAMERA_EXTRA, false);
- }
- /*TODO: if (mSecureCamera) {
- IntentFilter filter = new IntentFilter(Intent.ACTION_SCREEN_OFF);
- registerReceiver(mScreenOffReceiver, filter);
- if (sScreenOffReceiver == null) {
- sScreenOffReceiver = new ScreenOffReceiver();
- getApplicationContext().registerReceiver(sScreenOffReceiver, filter);
- }
- }*/
- LayoutInflater inflater = getLayoutInflater();
- View rootLayout = inflater.inflate(R.layout.camera, null, false);
- mRootView = rootLayout.findViewById(R.id.camera_app_root);
- mDataAdapter = new CameraDataAdapter(
- new ColorDrawable(getResources().getColor(R.color.photo_placeholder)));
- mFilmStripView = (FilmStripView) findViewById(R.id.filmstrip_view);
- mFilmStripView.setViewGap(
- getResources().getDimensionPixelSize(R.dimen.camera_film_strip_gap));
- // Set up the camera preview first so the preview shows up ASAP.
- mDataAdapter.setCameraPreviewInfo(rootLayout,
- FilmStripView.ImageData.SIZE_FULL, FilmStripView.ImageData.SIZE_FULL);
- mFilmStripView.setDataAdapter(mDataAdapter);
- mFilmStripView.setListener(new FilmStripView.Listener() {
- @Override
- public void onDataPromoted(int dataID) {
- mDataAdapter.removeData(dataID);
- }
-
- @Override
- public void onDataDemoted(int dataID) {
- mDataAdapter.removeData(dataID);
- }
-
- @Override
- public void onDataFullScreenChange(int dataID, boolean full) {
- }
- });
- mCurrentModule = new NewPhotoModule();
- mCurrentModule.init(this, mRootView);
- mOrientationListener = new MyOrientationEventListener(this);
- bindMediaSaveService();
- }
-
- private void setRotationAnimation() {
- int rotationAnimation = WindowManager.LayoutParams.ROTATION_ANIMATION_ROTATE;
- rotationAnimation = WindowManager.LayoutParams.ROTATION_ANIMATION_CROSSFADE;
- Window win = getWindow();
- WindowManager.LayoutParams winParams = win.getAttributes();
- winParams.rotationAnimation = rotationAnimation;
- win.setAttributes(winParams);
- }
-
- @Override
- public void onUserInteraction() {
- super.onUserInteraction();
- mCurrentModule.onUserInteraction();
- }
-
- @Override
- public void onPause() {
- mOrientationListener.disable();
- mCurrentModule.onPauseBeforeSuper();
- super.onPause();
- mCurrentModule.onPauseAfterSuper();
- }
-
- @Override
- public void onResume() {
- if (Settings.System.getInt(getContentResolver(),
- Settings.System.ACCELEROMETER_ROTATION, 0) == 0) {// auto-rotate off
- setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_UNSPECIFIED);
- mAutoRotateScreen = false;
- } else {
- setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_FULL_SENSOR);
- mAutoRotateScreen = true;
- }
- mOrientationListener.enable();
- mCurrentModule.onResumeBeforeSuper();
- super.onResume();
- mCurrentModule.onResumeAfterSuper();
-
- // The loading is done in the background and will update the filmstrip later.
- if (!mSecureCamera) {
- mDataAdapter.requestLoad(getContentResolver());
- } else {
- // Flush out all the original data first.
- mDataAdapter.flush();
- ImageView v = (ImageView) getLayoutInflater().inflate(
- R.layout.secure_album_placeholder, null);
- // Put a lock placeholder as the last image by setting its date to 0.
- mDataAdapter.addLocalData(
- new LocalData.LocalViewData(
- v,
- v.getDrawable().getIntrinsicWidth(),
- v.getDrawable().getIntrinsicHeight(),
- 0, 0));
- }
- setSwipingEnabled(true);
- }
-
- @Override
- public void onDestroy() {
- unbindMediaSaveService();
- super.onDestroy();
- }
-
- @Override
- public void onConfigurationChanged(Configuration config) {
- super.onConfigurationChanged(config);
- mCurrentModule.onConfigurationChanged(config);
- }
-
- @Override
- public boolean dispatchTouchEvent(MotionEvent m) {
- return mFilmStripView.dispatchTouchEvent(m);
- }
- public boolean isAutoRotateScreen() {
- return mAutoRotateScreen;
- }
-
- protected void updateStorageSpace() {
- mStorageSpace = Storage.getAvailableSpace();
- }
-
- protected long getStorageSpace() {
- return mStorageSpace;
- }
-
- protected void updateStorageSpaceAndHint() {
- updateStorageSpace();
- updateStorageHint(mStorageSpace);
- }
-
- protected void updateStorageHint() {
- updateStorageHint(mStorageSpace);
- }
-
- protected boolean updateStorageHintOnResume() {
- return true;
- }
-
- protected void updateStorageHint(long storageSpace) {
- String message = null;
- if (storageSpace == Storage.UNAVAILABLE) {
- message = getString(R.string.no_storage);
- } else if (storageSpace == Storage.PREPARING) {
- message = getString(R.string.preparing_sd);
- } else if (storageSpace == Storage.UNKNOWN_SIZE) {
- message = getString(R.string.access_sd_fail);
- } else if (storageSpace <= Storage.LOW_STORAGE_THRESHOLD) {
- message = getString(R.string.spaceIsLow_content);
- }
-
- if (message != null) {
- if (mStorageHint == null) {
- mStorageHint = OnScreenHint.makeText(this, message);
- } else {
- mStorageHint.setText(message);
- }
- mStorageHint.show();
- } else if (mStorageHint != null) {
- mStorageHint.cancel();
- mStorageHint = null;
- }
- }
-
- protected void setResultEx(int resultCode) {
- mResultCodeForTesting = resultCode;
- setResult(resultCode);
- }
-
- protected void setResultEx(int resultCode, Intent data) {
- mResultCodeForTesting = resultCode;
- mResultDataForTesting = data;
- setResult(resultCode, data);
- }
-
- public int getResultCode() {
- return mResultCodeForTesting;
- }
-
- public Intent getResultData() {
- return mResultDataForTesting;
- }
-
- public boolean isSecureCamera() {
- return mSecureCamera;
- }
-
- @Override
- public void onCameraSelected(int i) {
- if (mCurrentModuleIndex == i) return;
-
- CameraHolder.instance().keep();
- closeModule(mCurrentModule);
- mCurrentModuleIndex = i;
- switch (i) {
- case VIDEO_MODULE_INDEX:
- mCurrentModule = new NewVideoModule();
- break;
- case PHOTO_MODULE_INDEX:
- mCurrentModule = new NewPhotoModule();
- break;
- /* TODO:
- case LIGHTCYCLE_MODULE_INDEX:
- mCurrentModule = LightCycleHelper.createPanoramaModule();
- break; */
- default:
- break;
- }
-
- openModule(mCurrentModule);
- mCurrentModule.onOrientationChanged(mLastRawOrientation);
- if (mMediaSaveService != null) {
- mCurrentModule.onMediaSaveServiceConnected(mMediaSaveService);
- }
- }
-
- private void openModule(NewCameraModule module) {
- module.init(this, mRootView);
- module.onResumeBeforeSuper();
- module.onResumeAfterSuper();
- }
-
- private void closeModule(NewCameraModule module) {
- module.onPauseBeforeSuper();
- module.onPauseAfterSuper();
- ((ViewGroup) mRootView).removeAllViews();
- }
-
- @Override
- public void onShowSwitcherPopup() {
- }
-
- public void setSwipingEnabled(boolean enable) {
- mDataAdapter.setCameraPreviewLock(!enable);
- }
-
- // Accessor methods for getting latency times used in performance testing
- public long getAutoFocusTime() {
- return (mCurrentModule instanceof PhotoModule) ?
- ((PhotoModule) mCurrentModule).mAutoFocusTime : -1;
- }
-
- public long getShutterLag() {
- return (mCurrentModule instanceof PhotoModule) ?
- ((PhotoModule) mCurrentModule).mShutterLag : -1;
- }
-
- public long getShutterToPictureDisplayedTime() {
- return (mCurrentModule instanceof PhotoModule) ?
- ((PhotoModule) mCurrentModule).mShutterToPictureDisplayedTime : -1;
- }
-
- public long getPictureDisplayedToJpegCallbackTime() {
- return (mCurrentModule instanceof PhotoModule) ?
- ((PhotoModule) mCurrentModule).mPictureDisplayedToJpegCallbackTime : -1;
- }
-
- public long getJpegCallbackFinishTime() {
- return (mCurrentModule instanceof PhotoModule) ?
- ((PhotoModule) mCurrentModule).mJpegCallbackFinishTime : -1;
- }
-
- public long getCaptureStartTime() {
- return (mCurrentModule instanceof PhotoModule) ?
- ((PhotoModule) mCurrentModule).mCaptureStartTime : -1;
- }
-
- public boolean isRecording() {
- return (mCurrentModule instanceof VideoModule) ?
- ((VideoModule) mCurrentModule).isRecording() : false;
- }
-}
diff --git a/src/com/android/camera/NewCameraModule.java b/src/com/android/camera/NewCameraModule.java
deleted file mode 100644
index 35452bb3c..000000000
--- a/src/com/android/camera/NewCameraModule.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.camera;
-
-import android.content.Intent;
-import android.content.res.Configuration;
-import android.view.KeyEvent;
-import android.view.MotionEvent;
-import android.view.View;
-
-public interface NewCameraModule {
-
- public void init(NewCameraActivity activity, View frame);
-
- public void onFullScreenChanged(boolean full);
-
- public void onPauseBeforeSuper();
-
- public void onPauseAfterSuper();
-
- public void onResumeBeforeSuper();
-
- public void onResumeAfterSuper();
-
- public void onConfigurationChanged(Configuration config);
-
- public void onStop();
-
- public void installIntentFilter();
-
- public void onActivityResult(int requestCode, int resultCode, Intent data);
-
- public boolean onBackPressed();
-
- public boolean onKeyDown(int keyCode, KeyEvent event);
-
- public boolean onKeyUp(int keyCode, KeyEvent event);
-
- public void onSingleTapUp(View view, int x, int y);
-
- public void onPreviewTextureCopied();
-
- public void onCaptureTextureCopied();
-
- public void onUserInteraction();
-
- public boolean updateStorageHintOnResume();
-
- public void updateCameraAppView();
-
- public boolean needsSwitcher();
-
- public boolean needsPieMenu();
-
- public void onOrientationChanged(int orientation);
-
- public void onShowSwitcherPopup();
-
- public void onMediaSaveServiceConnected(MediaSaveService s);
-}
diff --git a/src/com/android/camera/NewPhotoMenu.java b/src/com/android/camera/NewPhotoMenu.java
deleted file mode 100644
index c63aff458..000000000
--- a/src/com/android/camera/NewPhotoMenu.java
+++ /dev/null
@@ -1,200 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.camera;
-
-import android.content.res.Resources;
-import android.hardware.Camera.Parameters;
-
-import com.android.camera.ui.AbstractSettingPopup;
-import com.android.camera.ui.CountdownTimerPopup;
-import com.android.camera.ui.ListPrefSettingPopup;
-import com.android.camera.ui.PieItem;
-import com.android.camera.ui.PieItem.OnClickListener;
-import com.android.camera.ui.PieRenderer;
-import com.android.gallery3d.R;
-
-import java.util.Locale;
-
-public class NewPhotoMenu extends PieController
- implements CountdownTimerPopup.Listener,
- ListPrefSettingPopup.Listener {
- private static String TAG = "CAM_photomenu";
-
- private final String mSettingOff;
-
- private NewPhotoUI mUI;
- private AbstractSettingPopup mPopup;
- private NewCameraActivity mActivity;
-
- public NewPhotoMenu(NewCameraActivity activity, NewPhotoUI ui, PieRenderer pie) {
- super(activity, pie);
- mUI = ui;
- mSettingOff = activity.getString(R.string.setting_off_value);
- mActivity = activity;
- }
-
- public void initialize(PreferenceGroup group) {
- super.initialize(group);
- mPopup = null;
- PieItem item = null;
- final Resources res = mActivity.getResources();
- Locale locale = res.getConfiguration().locale;
- // the order is from left to right in the menu
-
- // hdr
- if (group.findPreference(CameraSettings.KEY_CAMERA_HDR) != null) {
- item = makeSwitchItem(CameraSettings.KEY_CAMERA_HDR, true);
- mRenderer.addItem(item);
- }
- // exposure compensation
- if (group.findPreference(CameraSettings.KEY_EXPOSURE) != null) {
- item = makeItem(CameraSettings.KEY_EXPOSURE);
- item.setLabel(res.getString(R.string.pref_exposure_label));
- mRenderer.addItem(item);
- }
- // more settings
- PieItem more = makeItem(R.drawable.ic_settings_holo_light);
- more.setLabel(res.getString(R.string.camera_menu_more_label));
- mRenderer.addItem(more);
- // flash
- if (group.findPreference(CameraSettings.KEY_FLASH_MODE) != null) {
- item = makeItem(CameraSettings.KEY_FLASH_MODE);
- item.setLabel(res.getString(R.string.pref_camera_flashmode_label));
- mRenderer.addItem(item);
- }
- // camera switcher
- if (group.findPreference(CameraSettings.KEY_CAMERA_ID) != null) {
- item = makeSwitchItem(CameraSettings.KEY_CAMERA_ID, false);
- final PieItem fitem = item;
- item.setOnClickListener(new OnClickListener() {
- @Override
- public void onClick(PieItem item) {
- // Find the index of next camera.
- ListPreference pref = mPreferenceGroup
- .findPreference(CameraSettings.KEY_CAMERA_ID);
- if (pref != null) {
- int index = pref.findIndexOfValue(pref.getValue());
- CharSequence[] values = pref.getEntryValues();
- index = (index + 1) % values.length;
- pref.setValueIndex(index);
- mListener.onCameraPickerClicked(index);
- }
- updateItem(fitem, CameraSettings.KEY_CAMERA_ID);
- }
- });
- mRenderer.addItem(item);
- }
- // location
- if (group.findPreference(CameraSettings.KEY_RECORD_LOCATION) != null) {
- item = makeSwitchItem(CameraSettings.KEY_RECORD_LOCATION, true);
- more.addItem(item);
- if (mActivity.isSecureCamera()) {
- // Prevent location preference from getting changed in secure camera mode
- item.setEnabled(false);
- }
- }
- // countdown timer
- final ListPreference ctpref = group.findPreference(CameraSettings.KEY_TIMER);
- final ListPreference beeppref = group.findPreference(CameraSettings.KEY_TIMER_SOUND_EFFECTS);
- item = makeItem(R.drawable.ic_timer);
- item.setLabel(res.getString(R.string.pref_camera_timer_title).toUpperCase(locale));
- item.setOnClickListener(new OnClickListener() {
- @Override
- public void onClick(PieItem item) {
- CountdownTimerPopup timerPopup = (CountdownTimerPopup) mActivity.getLayoutInflater().inflate(
- R.layout.countdown_setting_popup, null, false);
- timerPopup.initialize(ctpref, beeppref);
- timerPopup.setSettingChangedListener(NewPhotoMenu.this);
- mUI.dismissPopup();
- mPopup = timerPopup;
- mUI.showPopup(mPopup);
- }
- });
- more.addItem(item);
- // image size
- item = makeItem(R.drawable.ic_imagesize);
- final ListPreference sizePref = group.findPreference(CameraSettings.KEY_PICTURE_SIZE);
- item.setLabel(res.getString(R.string.pref_camera_picturesize_title).toUpperCase(locale));
- item.setOnClickListener(new OnClickListener() {
- @Override
- public void onClick(PieItem item) {
- ListPrefSettingPopup popup = (ListPrefSettingPopup) mActivity.getLayoutInflater().inflate(
- R.layout.list_pref_setting_popup, null, false);
- popup.initialize(sizePref);
- popup.setSettingChangedListener(NewPhotoMenu.this);
- mUI.dismissPopup();
- mPopup = popup;
- mUI.showPopup(mPopup);
- }
- });
- more.addItem(item);
- // white balance
- if (group.findPreference(CameraSettings.KEY_WHITE_BALANCE) != null) {
- item = makeItem(CameraSettings.KEY_WHITE_BALANCE);
- item.setLabel(res.getString(R.string.pref_camera_whitebalance_label));
- more.addItem(item);
- }
- // scene mode
- if (group.findPreference(CameraSettings.KEY_SCENE_MODE) != null) {
- IconListPreference pref = (IconListPreference) group.findPreference(
- CameraSettings.KEY_SCENE_MODE);
- pref.setUseSingleIcon(true);
- item = makeItem(CameraSettings.KEY_SCENE_MODE);
- more.addItem(item);
- }
- }
-
- @Override
- // Called when an item in a popup gets selected.
- public void onListPrefChanged(ListPreference pref) {
- if (mPopup != null) {
- mUI.dismissPopup();
- }
- onSettingChanged(pref);
- }
-
- public void popupDismissed() {
- if (mPopup != null) {
- mPopup = null;
- }
- }
-
- // Return true if the preference has the specified key but a different value.
- private static boolean notSame(ListPreference pref, String key, String value) {
- return (key.equals(pref.getKey()) && !value.equals(pref.getValue()));
- }
-
- private void setPreference(String key, String value) {
- ListPreference pref = mPreferenceGroup.findPreference(key);
- if (pref != null && !value.equals(pref.getValue())) {
- pref.setValue(value);
- reloadPreferences();
- }
- }
-
- @Override
- public void onSettingChanged(ListPreference pref) {
- // If HDR is turned on, reset the scene mode to auto. If the scene mode is
- // set to non-auto, turn HDR off.
- if (notSame(pref, CameraSettings.KEY_CAMERA_HDR, mSettingOff)) {
- setPreference(CameraSettings.KEY_SCENE_MODE, Parameters.SCENE_MODE_AUTO);
- } else if (notSame(pref, CameraSettings.KEY_SCENE_MODE, Parameters.SCENE_MODE_AUTO)) {
- setPreference(CameraSettings.KEY_CAMERA_HDR, mSettingOff);
- }
- super.onSettingChanged(pref);
- }
-}
diff --git a/src/com/android/camera/NewPhotoModule.java b/src/com/android/camera/NewPhotoModule.java
deleted file mode 100644
index cd3281476..000000000
--- a/src/com/android/camera/NewPhotoModule.java
+++ /dev/null
@@ -1,2032 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.camera;
-
-import android.annotation.TargetApi;
-import android.app.Activity;
-import android.app.AlertDialog;
-import android.content.ContentProviderClient;
-import android.content.ContentResolver;
-import android.content.Context;
-import android.content.DialogInterface;
-import android.content.Intent;
-import android.content.SharedPreferences.Editor;
-import android.content.res.Configuration;
-import android.graphics.Bitmap;
-import android.graphics.SurfaceTexture;
-import android.hardware.Camera.CameraInfo;
-import android.hardware.Camera.Parameters;
-import android.hardware.Camera.PictureCallback;
-import android.hardware.Camera.Size;
-import android.hardware.Sensor;
-import android.hardware.SensorEvent;
-import android.hardware.SensorEventListener;
-import android.hardware.SensorManager;
-import android.location.Location;
-import android.media.CameraProfile;
-import android.net.Uri;
-import android.os.Bundle;
-import android.os.ConditionVariable;
-import android.os.Handler;
-import android.os.Looper;
-import android.os.Message;
-import android.os.MessageQueue;
-import android.os.SystemClock;
-import android.provider.MediaStore;
-import android.util.Log;
-import android.view.KeyEvent;
-import android.view.MotionEvent;
-import android.view.OrientationEventListener;
-import android.view.SurfaceHolder;
-import android.view.View;
-import android.view.WindowManager;
-
-import com.android.camera.CameraManager.CameraProxy;
-import com.android.camera.ui.CountDownView.OnCountDownFinishedListener;
-import com.android.camera.ui.PopupManager;
-import com.android.camera.ui.RotateTextToast;
-import com.android.gallery3d.R;
-import com.android.gallery3d.common.ApiHelper;
-import com.android.gallery3d.exif.ExifInterface;
-import com.android.gallery3d.exif.ExifTag;
-import com.android.gallery3d.exif.Rational;
-import com.android.gallery3d.filtershow.crop.CropActivity;
-import com.android.gallery3d.filtershow.crop.CropExtras;
-import com.android.gallery3d.util.UsageStatistics;
-
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.Formatter;
-import java.util.List;
-
-public class NewPhotoModule
- implements NewCameraModule,
- PhotoController,
- FocusOverlayManager.Listener,
- CameraPreference.OnPreferenceChangedListener,
- ShutterButton.OnShutterButtonListener,
- MediaSaveService.Listener,
- OnCountDownFinishedListener,
- SensorEventListener {
-
- private static final String TAG = "CAM_PhotoModule";
-
- // We number the request code from 1000 to avoid collision with Gallery.
- private static final int REQUEST_CROP = 1000;
-
- private static final int SETUP_PREVIEW = 1;
- private static final int FIRST_TIME_INIT = 2;
- private static final int CLEAR_SCREEN_DELAY = 3;
- private static final int SET_CAMERA_PARAMETERS_WHEN_IDLE = 4;
- private static final int CHECK_DISPLAY_ROTATION = 5;
- private static final int SHOW_TAP_TO_FOCUS_TOAST = 6;
- private static final int SWITCH_CAMERA = 7;
- private static final int SWITCH_CAMERA_START_ANIMATION = 8;
- private static final int CAMERA_OPEN_DONE = 9;
- private static final int START_PREVIEW_DONE = 10;
- private static final int OPEN_CAMERA_FAIL = 11;
- private static final int CAMERA_DISABLED = 12;
- private static final int CAPTURE_ANIMATION_DONE = 13;
-
- // The subset of parameters we need to update in setCameraParameters().
- private static final int UPDATE_PARAM_INITIALIZE = 1;
- private static final int UPDATE_PARAM_ZOOM = 2;
- private static final int UPDATE_PARAM_PREFERENCE = 4;
- private static final int UPDATE_PARAM_ALL = -1;
-
- // This is the timeout for keeping the camera open in onPause for the first time
- // after the screen turns on, if the activity was started from the secure lock screen.
- private static final int KEEP_CAMERA_TIMEOUT = 1000; // ms
-
- // copied from Camera hierarchy
- private NewCameraActivity mActivity;
- private CameraProxy mCameraDevice;
- private int mCameraId;
- private Parameters mParameters;
- private boolean mPaused;
-
- private NewPhotoUI mUI;
-
- // The camera id the activity is going to switch to. This is needed because
- // the texture copy is done on the GL thread. -1 means the camera is not
- // switching.
- protected int mPendingSwitchCameraId = -1;
- private boolean mOpenCameraFail;
- private boolean mCameraDisabled;
-
- // When setCameraParametersWhenIdle() is called, we accumulate the subsets
- // that need to be updated in mUpdateSet.
- private int mUpdateSet;
-
- private static final int SCREEN_DELAY = 2 * 60 * 1000;
-
- private int mZoomValue; // The current zoom value.
-
- private Parameters mInitialParams;
- private boolean mFocusAreaSupported;
- private boolean mMeteringAreaSupported;
- private boolean mAeLockSupported;
- private boolean mAwbLockSupported;
- private boolean mContinousFocusSupported;
-
- // The degrees of the device rotated clockwise from its natural orientation.
- private int mOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
- private ComboPreferences mPreferences;
-
- private static final String sTempCropFilename = "crop-temp";
-
- private ContentProviderClient mMediaProviderClient;
- private boolean mFaceDetectionStarted = false;
-
- // mCropValue and mSaveUri are used only if isImageCaptureIntent() is true.
- private String mCropValue;
- private Uri mSaveUri;
-
- // We use a queue to generate names for the images, to be used later
- // when each image is ready to be saved.
- private NamedImages mNamedImages;
-
- private Runnable mDoSnapRunnable = new Runnable() {
- @Override
- public void run() {
- onShutterButtonClick();
- }
- };
-
- private Runnable mFlashRunnable = new Runnable() {
- @Override
- public void run() {
- animateFlash();
- }
- };
-
- private final StringBuilder mBuilder = new StringBuilder();
- private final Formatter mFormatter = new Formatter(mBuilder);
- private final Object[] mFormatterArgs = new Object[1];
-
- /**
- * An unpublished intent flag requesting to return as soon as capturing
- * is completed.
- *
- * TODO: consider publishing by moving into MediaStore.
- */
- private static final String EXTRA_QUICK_CAPTURE =
- "android.intent.extra.quickCapture";
-
- // The display rotation in degrees. This is only valid when mCameraState is
- // not PREVIEW_STOPPED.
- private int mDisplayRotation;
- // The value for android.hardware.Camera.setDisplayOrientation.
- private int mCameraDisplayOrientation;
- // The value for UI components like indicators.
- private int mDisplayOrientation;
- // The value for android.hardware.Camera.Parameters.setRotation.
- private int mJpegRotation;
- private boolean mFirstTimeInitialized;
- private boolean mIsImageCaptureIntent;
-
- private int mCameraState = PREVIEW_STOPPED;
- private boolean mSnapshotOnIdle = false;
-
- private ContentResolver mContentResolver;
-
- private LocationManager mLocationManager;
-
- private final PostViewPictureCallback mPostViewPictureCallback =
- new PostViewPictureCallback();
- private final RawPictureCallback mRawPictureCallback =
- new RawPictureCallback();
- private final AutoFocusCallback mAutoFocusCallback =
- new AutoFocusCallback();
- private final Object mAutoFocusMoveCallback =
- ApiHelper.HAS_AUTO_FOCUS_MOVE_CALLBACK
- ? new AutoFocusMoveCallback()
- : null;
-
- private final CameraErrorCallback mErrorCallback = new CameraErrorCallback();
-
- private long mFocusStartTime;
- private long mShutterCallbackTime;
- private long mPostViewPictureCallbackTime;
- private long mRawPictureCallbackTime;
- private long mJpegPictureCallbackTime;
- private long mOnResumeTime;
- private byte[] mJpegImageData;
-
- // These latency times are for the CameraLatency test.
- public long mAutoFocusTime;
- public long mShutterLag;
- public long mShutterToPictureDisplayedTime;
- public long mPictureDisplayedToJpegCallbackTime;
- public long mJpegCallbackFinishTime;
- public long mCaptureStartTime;
-
- // This handles everything about focus.
- private FocusOverlayManager mFocusManager;
-
- private String mSceneMode;
-
- private final Handler mHandler = new MainHandler();
- private PreferenceGroup mPreferenceGroup;
-
- private boolean mQuickCapture;
- private SensorManager mSensorManager;
- private float[] mGData = new float[3];
- private float[] mMData = new float[3];
- private float[] mR = new float[16];
- private int mHeading = -1;
-
- CameraStartUpThread mCameraStartUpThread;
- ConditionVariable mStartPreviewPrerequisiteReady = new ConditionVariable();
-
- private MediaSaveService.OnMediaSavedListener mOnMediaSavedListener =
- new MediaSaveService.OnMediaSavedListener() {
- @Override
- public void onMediaSaved(Uri uri) {
- if (uri != null) {
- mActivity.notifyNewMedia(uri);
- }
- }
- };
-
- // The purpose of this thread is to avoid blocking the main thread in onCreate and onResume.
- private class CameraStartUpThread extends Thread {
- private volatile boolean mCancelled;
-
- public void cancel() {
- mCancelled = true;
- interrupt();
- }
-
- public boolean isCanceled() {
- return mCancelled;
- }
-
- @Override
- public void run() {
- try {
- // We need to check whether the activity is paused before long
- // operations to ensure that onPause() can be done ASAP.
- if (mCancelled) return;
- mCameraDevice = Util.openCamera(mActivity, mCameraId);
- mParameters = mCameraDevice.getParameters();
- // Wait until all the initialization needed by startPreview is
- // done.
- mStartPreviewPrerequisiteReady.block();
-
- initializeCapabilities();
- if (mFocusManager == null) initializeFocusManager();
- if (mCancelled) return;
- setCameraParameters(UPDATE_PARAM_ALL);
- mHandler.sendEmptyMessage(CAMERA_OPEN_DONE);
- if (mCancelled) return;
- startPreview();
- mHandler.sendEmptyMessage(START_PREVIEW_DONE);
- mOnResumeTime = SystemClock.uptimeMillis();
- mHandler.sendEmptyMessage(CHECK_DISPLAY_ROTATION);
- } catch (CameraHardwareException e) {
- mHandler.sendEmptyMessage(OPEN_CAMERA_FAIL);
- } catch (CameraDisabledException e) {
- mHandler.sendEmptyMessage(CAMERA_DISABLED);
- }
- }
- }
-
- /**
- * This Handler is used to post messages back onto the main thread of the
- * application.
- */
- private class MainHandler extends Handler {
- @Override
- public void handleMessage(Message msg) {
- switch (msg.what) {
- case SETUP_PREVIEW: {
- setupPreview();
- break;
- }
-
- case CLEAR_SCREEN_DELAY: {
- mActivity.getWindow().clearFlags(
- WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
- break;
- }
-
- case FIRST_TIME_INIT: {
- initializeFirstTime();
- break;
- }
-
- case SET_CAMERA_PARAMETERS_WHEN_IDLE: {
- setCameraParametersWhenIdle(0);
- break;
- }
-
- case CHECK_DISPLAY_ROTATION: {
- // Set the display orientation if the display rotation has changed.
- // This sometimes happens when the device is held upside
- // down and the camera app is opened. The rotation animation
- // takes some time, so the rotation value we got may be
- // wrong. The framework does not have a callback for this yet.
- if (Util.getDisplayRotation(mActivity) != mDisplayRotation) {
- setDisplayOrientation();
- }
- if (SystemClock.uptimeMillis() - mOnResumeTime < 5000) {
- mHandler.sendEmptyMessageDelayed(CHECK_DISPLAY_ROTATION, 100);
- }
- break;
- }
-
- case SHOW_TAP_TO_FOCUS_TOAST: {
- showTapToFocusToast();
- break;
- }
-
- case SWITCH_CAMERA: {
- switchCamera();
- break;
- }
-
- case SWITCH_CAMERA_START_ANIMATION: {
- // TODO: Need to revisit
- // ((CameraScreenNail) mActivity.mCameraScreenNail).animateSwitchCamera();
- break;
- }
-
- case CAMERA_OPEN_DONE: {
- onCameraOpened();
- break;
- }
-
- case START_PREVIEW_DONE: {
- onPreviewStarted();
- break;
- }
-
- case OPEN_CAMERA_FAIL: {
- mCameraStartUpThread = null;
- mOpenCameraFail = true;
- Util.showErrorAndFinish(mActivity,
- R.string.cannot_connect_camera);
- break;
- }
-
- case CAMERA_DISABLED: {
- mCameraStartUpThread = null;
- mCameraDisabled = true;
- Util.showErrorAndFinish(mActivity,
- R.string.camera_disabled);
- break;
- }
- case CAPTURE_ANIMATION_DONE: {
- mUI.enablePreviewThumb(false);
- break;
- }
- }
- }
- }
-
- @Override
- public void init(NewCameraActivity activity, View parent) {
- mActivity = activity;
- mUI = new NewPhotoUI(activity, this, parent);
- mPreferences = new ComboPreferences(mActivity);
- CameraSettings.upgradeGlobalPreferences(mPreferences.getGlobal());
- mCameraId = getPreferredCameraId(mPreferences);
-
- mContentResolver = mActivity.getContentResolver();
-
- // To reduce startup time, open the camera and start the preview in
- // another thread.
- mCameraStartUpThread = new CameraStartUpThread();
- mCameraStartUpThread.start();
-
- // The surface texture comes from the camera screen nail, and startPreview
- // needs it, so this must be done before startPreview.
- mIsImageCaptureIntent = isImageCaptureIntent();
-
- mPreferences.setLocalId(mActivity, mCameraId);
- CameraSettings.upgradeLocalPreferences(mPreferences.getLocal());
- // we need to reset exposure for the preview
- resetExposureCompensation();
- // Starting the preview needs preferences, camera screen nail, and
- // focus area indicator.
- mStartPreviewPrerequisiteReady.open();
-
- initializeControlByIntent();
- mQuickCapture = mActivity.getIntent().getBooleanExtra(EXTRA_QUICK_CAPTURE, false);
- mLocationManager = new LocationManager(mActivity, mUI);
- mSensorManager = (SensorManager)(mActivity.getSystemService(Context.SENSOR_SERVICE));
- }
-
- private void initializeControlByIntent() {
- mUI.initializeControlByIntent();
- if (mIsImageCaptureIntent) {
- setupCaptureParams();
- }
- }
-
- private void onPreviewStarted() {
- mCameraStartUpThread = null;
- setCameraState(IDLE);
- startFaceDetection();
- locationFirstRun();
- }
-
- // Prompt the user to choose whether to record location, on the very first
- // run of the camera only.
- private void locationFirstRun() {
- if (RecordLocationPreference.isSet(mPreferences)) {
- return;
- }
- if (mActivity.isSecureCamera()) return;
- // Check if the back camera exists
- int backCameraId = CameraHolder.instance().getBackCameraId();
- if (backCameraId == -1) {
- // If there is no back camera, do not show the prompt.
- return;
- }
-
- new AlertDialog.Builder(mActivity)
- .setTitle(R.string.remember_location_title)
- .setMessage(R.string.remember_location_prompt)
- .setPositiveButton(R.string.remember_location_yes, new DialogInterface.OnClickListener() {
- @Override
- public void onClick(DialogInterface dialog, int arg1) {
- setLocationPreference(RecordLocationPreference.VALUE_ON);
- }
- })
- .setNegativeButton(R.string.remember_location_no, new DialogInterface.OnClickListener() {
- @Override
- public void onClick(DialogInterface dialog, int arg1) {
- dialog.cancel();
- }
- })
- .setOnCancelListener(new DialogInterface.OnCancelListener() {
- @Override
- public void onCancel(DialogInterface dialog) {
- setLocationPreference(RecordLocationPreference.VALUE_OFF);
- }
- })
- .show();
- }
-
- private void setLocationPreference(String value) {
- mPreferences.edit()
- .putString(CameraSettings.KEY_RECORD_LOCATION, value)
- .apply();
- // TODO: Fix this to use the actual onSharedPreferencesChanged listener
- // instead of invoking it manually.
- onSharedPreferenceChanged();
- }
-
- private void onCameraOpened() {
- View root = mUI.getRootView();
- // These depend on camera parameters.
-
- int width = root.getWidth();
- int height = root.getHeight();
- mFocusManager.setPreviewSize(width, height);
- openCameraCommon();
- }
-
- private void switchCamera() {
- if (mPaused) return;
-
- Log.v(TAG, "Start to switch camera. id=" + mPendingSwitchCameraId);
- mCameraId = mPendingSwitchCameraId;
- mPendingSwitchCameraId = -1;
- setCameraId(mCameraId);
-
- // from onPause
- closeCamera();
- mUI.collapseCameraControls();
- mUI.clearFaces();
- if (mFocusManager != null) mFocusManager.removeMessages();
-
- // Restart the camera and initialize the UI. From onCreate.
- mPreferences.setLocalId(mActivity, mCameraId);
- CameraSettings.upgradeLocalPreferences(mPreferences.getLocal());
- try {
- mCameraDevice = Util.openCamera(mActivity, mCameraId);
- mParameters = mCameraDevice.getParameters();
- } catch (CameraHardwareException e) {
- Util.showErrorAndFinish(mActivity, R.string.cannot_connect_camera);
- return;
- } catch (CameraDisabledException e) {
- Util.showErrorAndFinish(mActivity, R.string.camera_disabled);
- return;
- }
- initializeCapabilities();
- CameraInfo info = CameraHolder.instance().getCameraInfo()[mCameraId];
- boolean mirror = (info.facing == CameraInfo.CAMERA_FACING_FRONT);
- mFocusManager.setMirror(mirror);
- mFocusManager.setParameters(mInitialParams);
- setupPreview();
-
- // reset zoom value index
- mZoomValue = 0;
- openCameraCommon();
-
- if (ApiHelper.HAS_SURFACE_TEXTURE) {
- // Start switch camera animation. Post a message because
- // onFrameAvailable from the old camera may already exist.
- mHandler.sendEmptyMessage(SWITCH_CAMERA_START_ANIMATION);
- }
- }
-
- protected void setCameraId(int cameraId) {
- ListPreference pref = mPreferenceGroup.findPreference(CameraSettings.KEY_CAMERA_ID);
- pref.setValue("" + cameraId);
- }
-
- // either open a new camera or switch cameras
- private void openCameraCommon() {
- loadCameraPreferences();
-
- mUI.onCameraOpened(mPreferenceGroup, mPreferences, mParameters, this);
- updateSceneMode();
- showTapToFocusToastIfNeeded();
-
-
- }
-
- public void onScreenSizeChanged(int width, int height, int previewWidth, int previewHeight) {
- if (mFocusManager != null) mFocusManager.setPreviewSize(width, height);
- }
-
- private void resetExposureCompensation() {
- String value = mPreferences.getString(CameraSettings.KEY_EXPOSURE,
- CameraSettings.EXPOSURE_DEFAULT_VALUE);
- if (!CameraSettings.EXPOSURE_DEFAULT_VALUE.equals(value)) {
- Editor editor = mPreferences.edit();
- editor.putString(CameraSettings.KEY_EXPOSURE, "0");
- editor.apply();
- }
- }
-
- private void keepMediaProviderInstance() {
- // We want to keep a reference to MediaProvider in camera's lifecycle.
- // TODO: Utilize mMediaProviderClient instance to replace
- // ContentResolver calls.
- if (mMediaProviderClient == null) {
- mMediaProviderClient = mContentResolver
- .acquireContentProviderClient(MediaStore.AUTHORITY);
- }
- }
-
- // Snapshots can only be taken after this is called. It should be called
- // only once. We could have done these things in onCreate(), but we want to
- // make the preview screen appear as soon as possible.
- private void initializeFirstTime() {
- if (mFirstTimeInitialized) return;
-
- // Initialize location service.
- boolean recordLocation = RecordLocationPreference.get(
- mPreferences, mContentResolver);
- mLocationManager.recordLocation(recordLocation);
-
- keepMediaProviderInstance();
-
- mUI.initializeFirstTime();
- MediaSaveService s = mActivity.getMediaSaveService();
- // We set the listener only when both the service and the shutter button
- // are initialized.
- if (s != null) {
- s.setListener(this);
- }
-
- mNamedImages = new NamedImages();
-
- mFirstTimeInitialized = true;
- addIdleHandler();
-
- mActivity.updateStorageSpaceAndHint();
- }
-
- // If the activity is paused and resumed, this method will be called in
- // onResume.
- private void initializeSecondTime() {
- // Start location update if needed.
- boolean recordLocation = RecordLocationPreference.get(
- mPreferences, mContentResolver);
- mLocationManager.recordLocation(recordLocation);
- MediaSaveService s = mActivity.getMediaSaveService();
- if (s != null) {
- s.setListener(this);
- }
- mNamedImages = new NamedImages();
- mUI.initializeSecondTime(mParameters);
- keepMediaProviderInstance();
- }
-
- @Override
- public void onSurfaceCreated(SurfaceHolder holder) {
- // Do not access the camera if camera start up thread is not finished.
- if (mCameraDevice == null || mCameraStartUpThread != null)
- return;
-
- mCameraDevice.setPreviewDisplayAsync(holder);
- // This happens when onConfigurationChanged arrives, surface has been
- // destroyed, and there is no onFullScreenChanged.
- if (mCameraState == PREVIEW_STOPPED) {
- setupPreview();
- }
- }
-
- private void showTapToFocusToastIfNeeded() {
- // Show the tap to focus toast if this is the first start.
- if (mFocusAreaSupported &&
- mPreferences.getBoolean(CameraSettings.KEY_CAMERA_FIRST_USE_HINT_SHOWN, true)) {
- // Delay the toast for one second to wait for orientation.
- mHandler.sendEmptyMessageDelayed(SHOW_TAP_TO_FOCUS_TOAST, 1000);
- }
- }
-
- private void addIdleHandler() {
- MessageQueue queue = Looper.myQueue();
- queue.addIdleHandler(new MessageQueue.IdleHandler() {
- @Override
- public boolean queueIdle() {
- Storage.ensureOSXCompatible();
- return false;
- }
- });
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH)
- @Override
- public void startFaceDetection() {
- if (!ApiHelper.HAS_FACE_DETECTION) return;
- if (mFaceDetectionStarted) return;
- if (mParameters.getMaxNumDetectedFaces() > 0) {
- mFaceDetectionStarted = true;
- CameraInfo info = CameraHolder.instance().getCameraInfo()[mCameraId];
- mUI.onStartFaceDetection(mDisplayOrientation,
- (info.facing == CameraInfo.CAMERA_FACING_FRONT));
- mCameraDevice.setFaceDetectionListener(mUI);
- mCameraDevice.startFaceDetection();
- }
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH)
- @Override
- public void stopFaceDetection() {
- if (!ApiHelper.HAS_FACE_DETECTION) return;
- if (!mFaceDetectionStarted) return;
- if (mParameters.getMaxNumDetectedFaces() > 0) {
- mFaceDetectionStarted = false;
- mCameraDevice.setFaceDetectionListener(null);
- mCameraDevice.stopFaceDetection();
- mUI.clearFaces();
- }
- }
-
- private final class ShutterCallback
- implements android.hardware.Camera.ShutterCallback {
-
- private boolean mAnimateFlash;
-
- public ShutterCallback(boolean animateFlash) {
- mAnimateFlash = animateFlash;
- }
-
- @Override
- public void onShutter() {
- mShutterCallbackTime = System.currentTimeMillis();
- mShutterLag = mShutterCallbackTime - mCaptureStartTime;
- Log.v(TAG, "mShutterLag = " + mShutterLag + "ms");
- if (mAnimateFlash) {
- mActivity.runOnUiThread(mFlashRunnable);
- }
- }
- }
-
- private final class PostViewPictureCallback implements PictureCallback {
- @Override
- public void onPictureTaken(
- byte [] data, android.hardware.Camera camera) {
- mPostViewPictureCallbackTime = System.currentTimeMillis();
- Log.v(TAG, "mShutterToPostViewCallbackTime = "
- + (mPostViewPictureCallbackTime - mShutterCallbackTime)
- + "ms");
- }
- }
-
- private final class RawPictureCallback implements PictureCallback {
- @Override
- public void onPictureTaken(
- byte [] rawData, android.hardware.Camera camera) {
- mRawPictureCallbackTime = System.currentTimeMillis();
- Log.v(TAG, "mShutterToRawCallbackTime = "
- + (mRawPictureCallbackTime - mShutterCallbackTime) + "ms");
- }
- }
-
- private final class JpegPictureCallback implements PictureCallback {
- Location mLocation;
-
- public JpegPictureCallback(Location loc) {
- mLocation = loc;
- }
-
- @Override
- public void onPictureTaken(
- final byte [] jpegData, final android.hardware.Camera camera) {
- if (mPaused) {
- return;
- }
- if (mSceneMode == Util.SCENE_MODE_HDR) {
- mUI.showSwitcher();
- mUI.setSwipingEnabled(true);
- }
-
- mJpegPictureCallbackTime = System.currentTimeMillis();
- // If the postview callback has arrived, the captured image is displayed
- // in the postview callback. If not, the captured image is displayed in
- // the raw picture callback.
- if (mPostViewPictureCallbackTime != 0) {
- mShutterToPictureDisplayedTime =
- mPostViewPictureCallbackTime - mShutterCallbackTime;
- mPictureDisplayedToJpegCallbackTime =
- mJpegPictureCallbackTime - mPostViewPictureCallbackTime;
- } else {
- mShutterToPictureDisplayedTime =
- mRawPictureCallbackTime - mShutterCallbackTime;
- mPictureDisplayedToJpegCallbackTime =
- mJpegPictureCallbackTime - mRawPictureCallbackTime;
- }
- Log.v(TAG, "mPictureDisplayedToJpegCallbackTime = "
- + mPictureDisplayedToJpegCallbackTime + "ms");
-
- /*TODO:
- // Only animate when in full screen capture mode
- // i.e. If monkey/a user swipes to the gallery during picture taking,
- // don't show animation
- if (ApiHelper.HAS_SURFACE_TEXTURE && !mIsImageCaptureIntent
- && mActivity.mShowCameraAppView) {
- // Finish capture animation
- mHandler.removeMessages(CAPTURE_ANIMATION_DONE);
- ((CameraScreenNail) mActivity.mCameraScreenNail).animateSlide();
- mHandler.sendEmptyMessageDelayed(CAPTURE_ANIMATION_DONE,
- CaptureAnimManager.getAnimationDuration());
- } */
- mFocusManager.updateFocusUI(); // Ensure focus indicator is hidden.
- if (!mIsImageCaptureIntent) {
- if (ApiHelper.CAN_START_PREVIEW_IN_JPEG_CALLBACK) {
- setupPreview();
- } else {
- // The camera HAL of some devices has a bug: starting the preview
- // immediately after taking a picture will fail. Wait some
- // time before starting the preview.
- mHandler.sendEmptyMessageDelayed(SETUP_PREVIEW, 300);
- }
- }
-
- if (!mIsImageCaptureIntent) {
- // Calculate the width and the height of the jpeg.
- Size s = mParameters.getPictureSize();
- ExifInterface exif = Exif.getExif(jpegData);
- int orientation = Exif.getOrientation(exif);
- int width, height;
- if ((mJpegRotation + orientation) % 180 == 0) {
- width = s.width;
- height = s.height;
- } else {
- width = s.height;
- height = s.width;
- }
- String title = mNamedImages.getTitle();
- long date = mNamedImages.getDate();
- if (title == null) {
- Log.e(TAG, "Unbalanced name/data pair");
- } else {
- if (date == -1) date = mCaptureStartTime;
- if (mHeading >= 0) {
- // heading direction has been updated by the sensor.
- ExifTag directionRefTag = exif.buildTag(
- ExifInterface.TAG_GPS_IMG_DIRECTION_REF,
- ExifInterface.GpsTrackRef.MAGNETIC_DIRECTION);
- ExifTag directionTag = exif.buildTag(
- ExifInterface.TAG_GPS_IMG_DIRECTION,
- new Rational(mHeading, 1));
- exif.setTag(directionRefTag);
- exif.setTag(directionTag);
- }
- mActivity.getMediaSaveService().addImage(
- jpegData, title, date, mLocation, width, height,
- orientation, exif, mOnMediaSavedListener, mContentResolver);
- }
- } else {
- mJpegImageData = jpegData;
- if (!mQuickCapture) {
- mUI.showPostCaptureAlert();
- } else {
- onCaptureDone();
- }
- }
-
- // Check this in advance of each shot so we don't add to shutter
- // latency. It's true that someone else could write to the SD card in
- // the meantime and fill it, but that could have happened between the
- // shutter press and saving the JPEG too.
- mActivity.updateStorageSpaceAndHint();
-
- long now = System.currentTimeMillis();
- mJpegCallbackFinishTime = now - mJpegPictureCallbackTime;
- Log.v(TAG, "mJpegCallbackFinishTime = "
- + mJpegCallbackFinishTime + "ms");
- mJpegPictureCallbackTime = 0;
- }
- }
-
- private final class AutoFocusCallback
- implements android.hardware.Camera.AutoFocusCallback {
- @Override
- public void onAutoFocus(
- boolean focused, android.hardware.Camera camera) {
- if (mPaused) return;
-
- mAutoFocusTime = System.currentTimeMillis() - mFocusStartTime;
- Log.v(TAG, "mAutoFocusTime = " + mAutoFocusTime + "ms");
- setCameraState(IDLE);
- mFocusManager.onAutoFocus(focused, mUI.isShutterPressed());
- }
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN)
- private final class AutoFocusMoveCallback
- implements android.hardware.Camera.AutoFocusMoveCallback {
- @Override
- public void onAutoFocusMoving(
- boolean moving, android.hardware.Camera camera) {
- mFocusManager.onAutoFocusMoving(moving);
- }
- }
-
- private static class NamedImages {
- private ArrayList<NamedEntity> mQueue;
- private boolean mStop;
- private NamedEntity mNamedEntity;
-
- public NamedImages() {
- mQueue = new ArrayList<NamedEntity>();
- }
-
- public void nameNewImage(ContentResolver resolver, long date) {
- NamedEntity r = new NamedEntity();
- r.title = Util.createJpegName(date);
- r.date = date;
- mQueue.add(r);
- }
-
- public String getTitle() {
- if (mQueue.isEmpty()) {
- mNamedEntity = null;
- return null;
- }
- mNamedEntity = mQueue.get(0);
- mQueue.remove(0);
-
- return mNamedEntity.title;
- }
-
- // Must be called after getTitle().
- public long getDate() {
- if (mNamedEntity == null) return -1;
- return mNamedEntity.date;
- }
-
- private static class NamedEntity {
- String title;
- long date;
- }
- }
-
- private void setCameraState(int state) {
- mCameraState = state;
- switch (state) {
- case PhotoController.PREVIEW_STOPPED:
- case PhotoController.SNAPSHOT_IN_PROGRESS:
- case PhotoController.SWITCHING_CAMERA:
- mUI.enableGestures(false);
- break;
- case PhotoController.IDLE:
- mUI.enableGestures(true);
- break;
- }
- }
-
- private void animateFlash() {
- /* //TODO:
- // Only animate when in full screen capture mode
- // i.e. If monkey/a user swipes to the gallery during picture taking,
- // don't show animation
- if (ApiHelper.HAS_SURFACE_TEXTURE && !mIsImageCaptureIntent
- && mActivity.mShowCameraAppView) {
- // Start capture animation.
- ((CameraScreenNail) mActivity.mCameraScreenNail).animateFlash(mDisplayRotation);
- mUI.enablePreviewThumb(true);
- mHandler.sendEmptyMessageDelayed(CAPTURE_ANIMATION_DONE,
- CaptureAnimManager.getAnimationDuration());
- } */
- }
-
- @Override
- public boolean capture() {
- // If we are already in the middle of taking a snapshot, or the image save
- // queue is full, ignore the request.
- if (mCameraDevice == null || mCameraState == SNAPSHOT_IN_PROGRESS
- || mCameraState == SWITCHING_CAMERA
- || mActivity.getMediaSaveService().isQueueFull()) {
- return false;
- }
- mCaptureStartTime = System.currentTimeMillis();
- mPostViewPictureCallbackTime = 0;
- mJpegImageData = null;
-
- final boolean animateBefore = (mSceneMode == Util.SCENE_MODE_HDR);
-
- if (animateBefore) {
- animateFlash();
- }
-
- // Set rotation and gps data.
- int orientation;
- // We need to be consistent with the framework orientation (i.e. the
- // orientation of the UI) when the auto-rotate screen setting is on.
- if (mActivity.isAutoRotateScreen()) {
- orientation = (360 - mDisplayRotation) % 360;
- } else {
- orientation = mOrientation;
- }
- mJpegRotation = Util.getJpegRotation(mCameraId, orientation);
- mParameters.setRotation(mJpegRotation);
- Location loc = mLocationManager.getCurrentLocation();
- Util.setGpsParameters(mParameters, loc);
- mCameraDevice.setParameters(mParameters);
-
- mCameraDevice.takePicture2(new ShutterCallback(!animateBefore),
- mRawPictureCallback, mPostViewPictureCallback,
- new JpegPictureCallback(loc), mCameraState,
- mFocusManager.getFocusState());
-
- mNamedImages.nameNewImage(mContentResolver, mCaptureStartTime);
-
- mFaceDetectionStarted = false;
- setCameraState(SNAPSHOT_IN_PROGRESS);
- UsageStatistics.onEvent(UsageStatistics.COMPONENT_CAMERA,
- UsageStatistics.ACTION_CAPTURE_DONE, "Photo");
- return true;
- }
-
- @Override
- public void setFocusParameters() {
- setCameraParameters(UPDATE_PARAM_PREFERENCE);
- }
-
- private int getPreferredCameraId(ComboPreferences preferences) {
- int intentCameraId = Util.getCameraFacingIntentExtras(mActivity);
- if (intentCameraId != -1) {
- // For testing purposes: launch a specific camera through the intent
- // extras.
- return intentCameraId;
- } else {
- return CameraSettings.readPreferredCameraId(preferences);
- }
- }
-
- private void updateSceneMode() {
- // If a scene mode is set, we cannot set flash mode, white balance, or
- // focus mode; instead, we read them from the driver.
- if (!Parameters.SCENE_MODE_AUTO.equals(mSceneMode)) {
- overrideCameraSettings(mParameters.getFlashMode(),
- mParameters.getWhiteBalance(), mParameters.getFocusMode());
- } else {
- overrideCameraSettings(null, null, null);
- }
- }
-
- private void overrideCameraSettings(final String flashMode,
- final String whiteBalance, final String focusMode) {
- mUI.overrideSettings(
- CameraSettings.KEY_FLASH_MODE, flashMode,
- CameraSettings.KEY_WHITE_BALANCE, whiteBalance,
- CameraSettings.KEY_FOCUS_MODE, focusMode);
- }
-
- private void loadCameraPreferences() {
- CameraSettings settings = new CameraSettings(mActivity, mInitialParams,
- mCameraId, CameraHolder.instance().getCameraInfo());
- mPreferenceGroup = settings.getPreferenceGroup(R.xml.camera_preferences);
- }
-
- @Override
- public void onOrientationChanged(int orientation) {
- // We keep the last known orientation. So if the user first orients
- // the camera and then points it at the floor or the sky, we still have
- // the correct orientation.
- if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN) return;
- mOrientation = Util.roundOrientation(orientation, mOrientation);
-
- // Show the toast after the first orientation change.
- if (mHandler.hasMessages(SHOW_TAP_TO_FOCUS_TOAST)) {
- mHandler.removeMessages(SHOW_TAP_TO_FOCUS_TOAST);
- showTapToFocusToast();
- }
- }
-
- @Override
- public void onStop() {
- if (mMediaProviderClient != null) {
- mMediaProviderClient.release();
- mMediaProviderClient = null;
- }
- }
-
- @Override
- public void onCaptureCancelled() {
- mActivity.setResultEx(Activity.RESULT_CANCELED, new Intent());
- mActivity.finish();
- }
-
- @Override
- public void onCaptureRetake() {
- if (mPaused)
- return;
- mUI.hidePostCaptureAlert();
- setupPreview();
- }
-
- @Override
- public void onCaptureDone() {
- if (mPaused) {
- return;
- }
-
- byte[] data = mJpegImageData;
-
- if (mCropValue == null) {
- // First handle the no crop case -- just return the value. If the
- // caller specifies a "save uri" then write the data to its
- // stream. Otherwise, pass back a scaled down version of the bitmap
- // directly in the extras.
- if (mSaveUri != null) {
- OutputStream outputStream = null;
- try {
- outputStream = mContentResolver.openOutputStream(mSaveUri);
- outputStream.write(data);
- outputStream.close();
-
- mActivity.setResultEx(Activity.RESULT_OK);
- mActivity.finish();
- } catch (IOException ex) {
- // ignore exception
- } finally {
- Util.closeSilently(outputStream);
- }
- } else {
- ExifInterface exif = Exif.getExif(data);
- int orientation = Exif.getOrientation(exif);
- Bitmap bitmap = Util.makeBitmap(data, 50 * 1024);
- bitmap = Util.rotate(bitmap, orientation);
- mActivity.setResultEx(Activity.RESULT_OK,
- new Intent("inline-data").putExtra("data", bitmap));
- mActivity.finish();
- }
- } else {
- // Save the image to a temp file and invoke the cropper
- Uri tempUri = null;
- FileOutputStream tempStream = null;
- try {
- File path = mActivity.getFileStreamPath(sTempCropFilename);
- path.delete();
- tempStream = mActivity.openFileOutput(sTempCropFilename, 0);
- tempStream.write(data);
- tempStream.close();
- tempUri = Uri.fromFile(path);
- } catch (FileNotFoundException ex) {
- mActivity.setResultEx(Activity.RESULT_CANCELED);
- mActivity.finish();
- return;
- } catch (IOException ex) {
- mActivity.setResultEx(Activity.RESULT_CANCELED);
- mActivity.finish();
- return;
- } finally {
- Util.closeSilently(tempStream);
- }
-
- Bundle newExtras = new Bundle();
- if (mCropValue.equals("circle")) {
- newExtras.putString("circleCrop", "true");
- }
- if (mSaveUri != null) {
- newExtras.putParcelable(MediaStore.EXTRA_OUTPUT, mSaveUri);
- } else {
- newExtras.putBoolean(CropExtras.KEY_RETURN_DATA, true);
- }
- if (mActivity.isSecureCamera()) {
- newExtras.putBoolean(CropExtras.KEY_SHOW_WHEN_LOCKED, true);
- }
-
- Intent cropIntent = new Intent(CropActivity.CROP_ACTION);
-
- cropIntent.setData(tempUri);
- cropIntent.putExtras(newExtras);
-
- mActivity.startActivityForResult(cropIntent, REQUEST_CROP);
- }
- }
-
- @Override
- public void onShutterButtonFocus(boolean pressed) {
- if (mPaused || mUI.collapseCameraControls()
- || (mCameraState == SNAPSHOT_IN_PROGRESS)
- || (mCameraState == PREVIEW_STOPPED)) return;
-
- // Do not start focusing if there is not enough storage.
- if (pressed && !canTakePicture()) return;
-
- if (pressed) {
- mFocusManager.onShutterDown();
- } else {
- // for countdown mode, we need to postpone the shutter release
- // i.e. lock the focus during countdown.
- if (!mUI.isCountingDown()) {
- mFocusManager.onShutterUp();
- }
- }
- }
-
- @Override
- public void onShutterButtonClick() {
- if (mPaused || mUI.collapseCameraControls()
- || (mCameraState == SWITCHING_CAMERA)
- || (mCameraState == PREVIEW_STOPPED)) return;
-
- // Do not take the picture if there is not enough storage.
- if (mActivity.getStorageSpace() <= Storage.LOW_STORAGE_THRESHOLD) {
- Log.i(TAG, "Not enough space or storage not ready. remaining="
- + mActivity.getStorageSpace());
- return;
- }
- Log.v(TAG, "onShutterButtonClick: mCameraState=" + mCameraState);
-
- if (mSceneMode == Util.SCENE_MODE_HDR) {
- mUI.hideSwitcher();
- mUI.setSwipingEnabled(false);
- }
- // If the user wants to take a snapshot while the previous one is still
- // in progress, remember the fact and take it after we finish the previous
- // one and restart the preview. "Snapshot in progress" also includes the
- // state where autofocus is running and a picture will be taken when the
- // focus callback arrives.
- if ((mFocusManager.isFocusingSnapOnFinish() || mCameraState == SNAPSHOT_IN_PROGRESS)
- && !mIsImageCaptureIntent) {
- mSnapshotOnIdle = true;
- return;
- }
-
- String timer = mPreferences.getString(
- CameraSettings.KEY_TIMER,
- mActivity.getString(R.string.pref_camera_timer_default));
- boolean playSound = mPreferences.getString(CameraSettings.KEY_TIMER_SOUND_EFFECTS,
- mActivity.getString(R.string.pref_camera_timer_sound_default))
- .equals(mActivity.getString(R.string.setting_on_value));
-
- int seconds = Integer.parseInt(timer);
- // When shutter button is pressed, check whether the previous countdown is
- // finished. If not, cancel the previous countdown and start a new one.
- if (mUI.isCountingDown()) {
- mUI.cancelCountDown();
- }
- if (seconds > 0) {
- mUI.startCountDown(seconds, playSound);
- } else {
- mSnapshotOnIdle = false;
- mFocusManager.doSnap();
- }
- }
-
- @Override
- public void installIntentFilter() {
- }
-
- @Override
- public boolean updateStorageHintOnResume() {
- return mFirstTimeInitialized;
- }
-
- @Override
- public void updateCameraAppView() {
- }
-
- @Override
- public void onResumeBeforeSuper() {
- mPaused = false;
- }
-
- @Override
- public void onResumeAfterSuper() {
- if (mOpenCameraFail || mCameraDisabled) return;
-
- mJpegPictureCallbackTime = 0;
- mZoomValue = 0;
- // Start the preview if it is not started.
- if (mCameraState == PREVIEW_STOPPED && mCameraStartUpThread == null) {
- resetExposureCompensation();
- mCameraStartUpThread = new CameraStartUpThread();
- mCameraStartUpThread.start();
- }
-
- // If first time initialization is not finished, put it in the
- // message queue.
- if (!mFirstTimeInitialized) {
- mHandler.sendEmptyMessage(FIRST_TIME_INIT);
- } else {
- initializeSecondTime();
- }
- keepScreenOnAwhile();
-
- // Dismiss the open menu if one exists.
- PopupManager.getInstance(mActivity).notifyShowPopup(null);
- UsageStatistics.onContentViewChanged(
- UsageStatistics.COMPONENT_CAMERA, "PhotoModule");
-
- Sensor gsensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
- if (gsensor != null) {
- mSensorManager.registerListener(this, gsensor, SensorManager.SENSOR_DELAY_NORMAL);
- }
-
- Sensor msensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
- if (msensor != null) {
- mSensorManager.registerListener(this, msensor, SensorManager.SENSOR_DELAY_NORMAL);
- }
- }
-
- void waitCameraStartUpThread() {
- try {
- if (mCameraStartUpThread != null) {
- mCameraStartUpThread.cancel();
- mCameraStartUpThread.join();
- mCameraStartUpThread = null;
- setCameraState(IDLE);
- }
- } catch (InterruptedException e) {
- // ignore
- }
- }
-
- @Override
- public void onPauseBeforeSuper() {
- mPaused = true;
- Sensor gsensor = mSensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
- if (gsensor != null) {
- mSensorManager.unregisterListener(this, gsensor);
- }
-
- Sensor msensor = mSensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
- if (msensor != null) {
- mSensorManager.unregisterListener(this, msensor);
- }
- }
-
- @Override
- public void onPauseAfterSuper() {
- // Wait for the camera start-up thread to finish.
- waitCameraStartUpThread();
-
- // When the camera is started from the secure lock screen for the first time
- // after the screen turns on, the activity gets onCreate->onResume->onPause->onResume.
- // To reduce the latency, keep the camera open for a short time so it does
- // not need to be opened again.
- if (mCameraDevice != null && mActivity.isSecureCamera()
- && ActivityBase.isFirstStartAfterScreenOn()) {
- ActivityBase.resetFirstStartAfterScreenOn();
- CameraHolder.instance().keep(KEEP_CAMERA_TIMEOUT);
- }
- // Reset the focus first. Camera CTS does not guarantee that
- // cancelAutoFocus is allowed after preview stops.
- if (mCameraDevice != null && mCameraState != PREVIEW_STOPPED) {
- mCameraDevice.cancelAutoFocus();
- }
- stopPreview();
-
- mNamedImages = null;
-
- if (mLocationManager != null) mLocationManager.recordLocation(false);
-
- // If we are in an image capture intent and have taken
- // a picture, we just clear it in onPause.
- mJpegImageData = null;
-
- // Remove the messages in the event queue.
- mHandler.removeMessages(SETUP_PREVIEW);
- mHandler.removeMessages(FIRST_TIME_INIT);
- mHandler.removeMessages(CHECK_DISPLAY_ROTATION);
- mHandler.removeMessages(SWITCH_CAMERA);
- mHandler.removeMessages(SWITCH_CAMERA_START_ANIMATION);
- mHandler.removeMessages(CAMERA_OPEN_DONE);
- mHandler.removeMessages(START_PREVIEW_DONE);
- mHandler.removeMessages(OPEN_CAMERA_FAIL);
- mHandler.removeMessages(CAMERA_DISABLED);
-
- closeCamera();
-
- resetScreenOn();
- mUI.onPause();
-
- mPendingSwitchCameraId = -1;
- if (mFocusManager != null) mFocusManager.removeMessages();
- MediaSaveService s = mActivity.getMediaSaveService();
- if (s != null) {
- s.setListener(null);
- }
- }
-
- /**
- * The focus manager is the first UI-related element to get initialized,
- * and it requires the RenderOverlay, so initialize it here.
- */
- private void initializeFocusManager() {
- // Create the FocusOverlayManager object; startPreview needs it.
- // If mFocusManager is not null, reuse it;
- // otherwise create a new instance.
- if (mFocusManager != null) {
- mFocusManager.removeMessages();
- } else {
- CameraInfo info = CameraHolder.instance().getCameraInfo()[mCameraId];
- boolean mirror = (info.facing == CameraInfo.CAMERA_FACING_FRONT);
- String[] defaultFocusModes = mActivity.getResources().getStringArray(
- R.array.pref_camera_focusmode_default_array);
- mFocusManager = new FocusOverlayManager(mPreferences, defaultFocusModes,
- mInitialParams, this, mirror,
- mActivity.getMainLooper(), mUI);
- }
- }
-
- @Override
- public void onConfigurationChanged(Configuration newConfig) {
- Log.v(TAG, "onConfigurationChanged");
- setDisplayOrientation();
- }
-
- @Override
- public void onActivityResult(
- int requestCode, int resultCode, Intent data) {
- switch (requestCode) {
- case REQUEST_CROP: {
- Intent intent = new Intent();
- if (data != null) {
- Bundle extras = data.getExtras();
- if (extras != null) {
- intent.putExtras(extras);
- }
- }
- mActivity.setResultEx(resultCode, intent);
- mActivity.finish();
-
- File path = mActivity.getFileStreamPath(sTempCropFilename);
- path.delete();
-
- break;
- }
- }
- }
-
- private boolean canTakePicture() {
- return isCameraIdle() && (mActivity.getStorageSpace() > Storage.LOW_STORAGE_THRESHOLD);
- }
-
- @Override
- public void autoFocus() {
- mFocusStartTime = System.currentTimeMillis();
- mCameraDevice.autoFocus(mAutoFocusCallback);
- setCameraState(FOCUSING);
- }
-
- @Override
- public void cancelAutoFocus() {
- mCameraDevice.cancelAutoFocus();
- setCameraState(IDLE);
- setCameraParameters(UPDATE_PARAM_PREFERENCE);
- }
-
-    // The preview area was touched. Handle touch-to-focus.
- @Override
- public void onSingleTapUp(View view, int x, int y) {
- if (mPaused || mCameraDevice == null || !mFirstTimeInitialized
- || mCameraState == SNAPSHOT_IN_PROGRESS
- || mCameraState == SWITCHING_CAMERA
- || mCameraState == PREVIEW_STOPPED) {
- return;
- }
-
-        // Do not trigger touch focus if a popup window is open.
- if (mUI.removeTopLevelPopup()) return;
-
- // Check if metering area or focus area is supported.
- if (!mFocusAreaSupported && !mMeteringAreaSupported) return;
- mFocusManager.onSingleTapUp(x, y);
- }
-
- @Override
- public boolean onBackPressed() {
- return mUI.onBackPressed();
- }
-
- @Override
- public boolean onKeyDown(int keyCode, KeyEvent event) {
- switch (keyCode) {
- case KeyEvent.KEYCODE_VOLUME_UP:
- case KeyEvent.KEYCODE_VOLUME_DOWN:
- case KeyEvent.KEYCODE_FOCUS:
- if (/*TODO: mActivity.isInCameraApp() &&*/ mFirstTimeInitialized) {
- if (event.getRepeatCount() == 0) {
- onShutterButtonFocus(true);
- }
- return true;
- }
- return false;
- case KeyEvent.KEYCODE_CAMERA:
- if (mFirstTimeInitialized && event.getRepeatCount() == 0) {
- onShutterButtonClick();
- }
- return true;
- case KeyEvent.KEYCODE_DPAD_CENTER:
- // If we get a dpad center event without any focused view, move
- // the focus to the shutter button and press it.
- if (mFirstTimeInitialized && event.getRepeatCount() == 0) {
- // Start auto-focus immediately to reduce shutter lag. After
- // the shutter button gets the focus, onShutterButtonFocus()
- // will be called again but it is fine.
- if (mUI.removeTopLevelPopup()) return true;
- onShutterButtonFocus(true);
- mUI.pressShutterButton();
- }
- return true;
- }
- return false;
- }
-
- @Override
- public boolean onKeyUp(int keyCode, KeyEvent event) {
- switch (keyCode) {
- case KeyEvent.KEYCODE_VOLUME_UP:
- case KeyEvent.KEYCODE_VOLUME_DOWN:
- if (/*mActivity.isInCameraApp() && */ mFirstTimeInitialized) {
- onShutterButtonClick();
- return true;
- }
- return false;
- case KeyEvent.KEYCODE_FOCUS:
- if (mFirstTimeInitialized) {
- onShutterButtonFocus(false);
- }
- return true;
- }
- return false;
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH)
- private void closeCamera() {
- if (mCameraDevice != null) {
- mCameraDevice.setZoomChangeListener(null);
- if(ApiHelper.HAS_FACE_DETECTION) {
- mCameraDevice.setFaceDetectionListener(null);
- }
- mCameraDevice.setErrorCallback(null);
- CameraHolder.instance().release();
- mFaceDetectionStarted = false;
- mCameraDevice = null;
- setCameraState(PREVIEW_STOPPED);
- mFocusManager.onCameraReleased();
- }
- }
-
- private void setDisplayOrientation() {
- mDisplayRotation = Util.getDisplayRotation(mActivity);
- mDisplayOrientation = Util.getDisplayOrientation(mDisplayRotation, mCameraId);
- mCameraDisplayOrientation = mDisplayOrientation;
- mUI.setDisplayOrientation(mDisplayOrientation);
- if (mFocusManager != null) {
- mFocusManager.setDisplayOrientation(mDisplayOrientation);
- }
- // Change the camera display orientation
- if (mCameraDevice != null) {
- mCameraDevice.setDisplayOrientation(mCameraDisplayOrientation);
- }
- }
-
- // Only called by UI thread.
- private void setupPreview() {
- mFocusManager.resetTouchFocus();
- startPreview();
- setCameraState(IDLE);
- startFaceDetection();
- }
-
-    // This can be called by the UI thread or the CameraStartUpThread, so it must
-    // not modify the views.
- private void startPreview() {
- mCameraDevice.setErrorCallback(mErrorCallback);
-
-        // The ICS camera framework has a bug: face detection state is not cleared
- // after taking a picture. Stop the preview to work around it. The bug
- // was fixed in JB.
- if (mCameraState != PREVIEW_STOPPED) stopPreview();
-
- setDisplayOrientation();
-
- if (!mSnapshotOnIdle) {
- // If the focus mode is continuous autofocus, call cancelAutoFocus to
-            // resume it because it may have been paused by an autoFocus call.
- if (Util.FOCUS_MODE_CONTINUOUS_PICTURE.equals(mFocusManager.getFocusMode())) {
- mCameraDevice.cancelAutoFocus();
- }
- mFocusManager.setAeAwbLock(false); // Unlock AE and AWB.
- }
- setCameraParameters(UPDATE_PARAM_ALL);
- // Let UI set its expected aspect ratio
- mUI.setPreviewSize(mParameters.getPreviewSize());
- Object st = mUI.getSurfaceTexture();
- if (st != null) {
- mCameraDevice.setPreviewTextureAsync((SurfaceTexture) st);
- }
-
- Log.v(TAG, "startPreview");
- mCameraDevice.startPreviewAsync();
- mFocusManager.onPreviewStarted();
-
- if (mSnapshotOnIdle) {
- mHandler.post(mDoSnapRunnable);
- }
- }
-
- @Override
- public void stopPreview() {
- if (mCameraDevice != null && mCameraState != PREVIEW_STOPPED) {
- Log.v(TAG, "stopPreview");
- mCameraDevice.stopPreview();
- mFaceDetectionStarted = false;
- }
- setCameraState(PREVIEW_STOPPED);
- if (mFocusManager != null) mFocusManager.onPreviewStopped();
- }
-
- @SuppressWarnings("deprecation")
- private void updateCameraParametersInitialize() {
-        // Reset the preview frame rate to the maximum because it may have been
-        // lowered by the video camera application.
- int[] fpsRange = Util.getMaxPreviewFpsRange(mParameters);
- if (fpsRange.length > 0) {
- mParameters.setPreviewFpsRange(
- fpsRange[Parameters.PREVIEW_FPS_MIN_INDEX],
- fpsRange[Parameters.PREVIEW_FPS_MAX_INDEX]);
- }
-
- mParameters.set(Util.RECORDING_HINT, Util.FALSE);
-
-        // Disable video stabilization. Convenience methods are not available at
-        // API levels <= 14.
- String vstabSupported = mParameters.get("video-stabilization-supported");
- if ("true".equals(vstabSupported)) {
- mParameters.set("video-stabilization", "false");
- }
- }
-
- private void updateCameraParametersZoom() {
- // Set zoom.
- if (mParameters.isZoomSupported()) {
- mParameters.setZoom(mZoomValue);
- }
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN)
- private void setAutoExposureLockIfSupported() {
- if (mAeLockSupported) {
- mParameters.setAutoExposureLock(mFocusManager.getAeAwbLock());
- }
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN)
- private void setAutoWhiteBalanceLockIfSupported() {
- if (mAwbLockSupported) {
- mParameters.setAutoWhiteBalanceLock(mFocusManager.getAeAwbLock());
- }
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH)
- private void setFocusAreasIfSupported() {
- if (mFocusAreaSupported) {
- mParameters.setFocusAreas(mFocusManager.getFocusAreas());
- }
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH)
- private void setMeteringAreasIfSupported() {
- if (mMeteringAreaSupported) {
- // Use the same area for focus and metering.
- mParameters.setMeteringAreas(mFocusManager.getMeteringAreas());
- }
- }
-
- private void updateCameraParametersPreference() {
- setAutoExposureLockIfSupported();
- setAutoWhiteBalanceLockIfSupported();
- setFocusAreasIfSupported();
- setMeteringAreasIfSupported();
-
- // Set picture size.
- String pictureSize = mPreferences.getString(
- CameraSettings.KEY_PICTURE_SIZE, null);
- if (pictureSize == null) {
- CameraSettings.initialCameraPictureSize(mActivity, mParameters);
- } else {
- List<Size> supported = mParameters.getSupportedPictureSizes();
- CameraSettings.setCameraPictureSize(
- pictureSize, supported, mParameters);
- }
- Size size = mParameters.getPictureSize();
-
- // Set a preview size that is closest to the viewfinder height and has
- // the right aspect ratio.
- List<Size> sizes = mParameters.getSupportedPreviewSizes();
- Size optimalSize = Util.getOptimalPreviewSize(mActivity, sizes,
- (double) size.width / size.height);
- Size original = mParameters.getPreviewSize();
- if (!original.equals(optimalSize)) {
- mParameters.setPreviewSize(optimalSize.width, optimalSize.height);
-
-            // Zoom-related settings change with the preview size, so set and then
-            // read back the parameters to get the latest values.
- if (mHandler.getLooper() == Looper.myLooper()) {
- // On UI thread only, not when camera starts up
- setupPreview();
- } else {
- mCameraDevice.setParameters(mParameters);
- }
- mParameters = mCameraDevice.getParameters();
- }
- Log.v(TAG, "Preview size is " + optimalSize.width + "x" + optimalSize.height);
-
-        // Since changing the scene mode may change the supported values, set the
-        // scene mode first. HDR is a scene mode; to promote it in the UI, it is
-        // stored in a separate preference.
- String hdr = mPreferences.getString(CameraSettings.KEY_CAMERA_HDR,
- mActivity.getString(R.string.pref_camera_hdr_default));
- if (mActivity.getString(R.string.setting_on_value).equals(hdr)) {
- mSceneMode = Util.SCENE_MODE_HDR;
- } else {
- mSceneMode = mPreferences.getString(
- CameraSettings.KEY_SCENE_MODE,
- mActivity.getString(R.string.pref_camera_scenemode_default));
- }
- if (Util.isSupported(mSceneMode, mParameters.getSupportedSceneModes())) {
- if (!mParameters.getSceneMode().equals(mSceneMode)) {
- mParameters.setSceneMode(mSceneMode);
-
-                // Setting the scene mode will change the flash mode, white
-                // balance, and focus mode settings. Read back the parameters
-                // here so we know their new values.
- mCameraDevice.setParameters(mParameters);
- mParameters = mCameraDevice.getParameters();
- }
- } else {
- mSceneMode = mParameters.getSceneMode();
- if (mSceneMode == null) {
- mSceneMode = Parameters.SCENE_MODE_AUTO;
- }
- }
-
- // Set JPEG quality.
- int jpegQuality = CameraProfile.getJpegEncodingQualityParameter(mCameraId,
- CameraProfile.QUALITY_HIGH);
- mParameters.setJpegQuality(jpegQuality);
-
-        // For the following settings, we need to check whether they are still
-        // supported by the latest driver; if not, ignore them.
-
- // Set exposure compensation
- int value = CameraSettings.readExposure(mPreferences);
- int max = mParameters.getMaxExposureCompensation();
- int min = mParameters.getMinExposureCompensation();
- if (value >= min && value <= max) {
- mParameters.setExposureCompensation(value);
- } else {
- Log.w(TAG, "invalid exposure range: " + value);
- }
-
- if (Parameters.SCENE_MODE_AUTO.equals(mSceneMode)) {
- // Set flash mode.
- String flashMode = mPreferences.getString(
- CameraSettings.KEY_FLASH_MODE,
- mActivity.getString(R.string.pref_camera_flashmode_default));
- List<String> supportedFlash = mParameters.getSupportedFlashModes();
- if (Util.isSupported(flashMode, supportedFlash)) {
- mParameters.setFlashMode(flashMode);
- } else {
- flashMode = mParameters.getFlashMode();
- if (flashMode == null) {
- flashMode = mActivity.getString(
- R.string.pref_camera_flashmode_no_flash);
- }
- }
-
- // Set white balance parameter.
- String whiteBalance = mPreferences.getString(
- CameraSettings.KEY_WHITE_BALANCE,
- mActivity.getString(R.string.pref_camera_whitebalance_default));
- if (Util.isSupported(whiteBalance,
- mParameters.getSupportedWhiteBalance())) {
- mParameters.setWhiteBalance(whiteBalance);
- } else {
- whiteBalance = mParameters.getWhiteBalance();
- if (whiteBalance == null) {
- whiteBalance = Parameters.WHITE_BALANCE_AUTO;
- }
- }
-
- // Set focus mode.
- mFocusManager.overrideFocusMode(null);
- mParameters.setFocusMode(mFocusManager.getFocusMode());
- } else {
- mFocusManager.overrideFocusMode(mParameters.getFocusMode());
- }
-
- if (mContinousFocusSupported && ApiHelper.HAS_AUTO_FOCUS_MOVE_CALLBACK) {
- updateAutoFocusMoveCallback();
- }
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.JELLY_BEAN)
- private void updateAutoFocusMoveCallback() {
- if (mParameters.getFocusMode().equals(Util.FOCUS_MODE_CONTINUOUS_PICTURE)) {
- mCameraDevice.setAutoFocusMoveCallback(
- (AutoFocusMoveCallback) mAutoFocusMoveCallback);
- } else {
- mCameraDevice.setAutoFocusMoveCallback(null);
- }
- }
-
-    // We separate the parameters into several subsets so we can update only
-    // the subsets that actually need updating. The PREFERENCE set needs extra
-    // locking because the preferences can also be changed from the GL thread.
- private void setCameraParameters(int updateSet) {
- if ((updateSet & UPDATE_PARAM_INITIALIZE) != 0) {
- updateCameraParametersInitialize();
- }
-
- if ((updateSet & UPDATE_PARAM_ZOOM) != 0) {
- updateCameraParametersZoom();
- }
-
- if ((updateSet & UPDATE_PARAM_PREFERENCE) != 0) {
- updateCameraParametersPreference();
- }
-
- mCameraDevice.setParameters(mParameters);
- }
-
-    // If the camera is idle, update the parameters immediately; otherwise
-    // accumulate them in mUpdateSet and update later.
- private void setCameraParametersWhenIdle(int additionalUpdateSet) {
- mUpdateSet |= additionalUpdateSet;
- if (mCameraDevice == null) {
- // We will update all the parameters when we open the device, so
- // we don't need to do anything now.
- mUpdateSet = 0;
- return;
- } else if (isCameraIdle()) {
- setCameraParameters(mUpdateSet);
- updateSceneMode();
- mUpdateSet = 0;
- } else {
- if (!mHandler.hasMessages(SET_CAMERA_PARAMETERS_WHEN_IDLE)) {
- mHandler.sendEmptyMessageDelayed(
- SET_CAMERA_PARAMETERS_WHEN_IDLE, 1000);
- }
- }
- }
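
setCameraParametersWhenIdle() above accumulates requested update subsets in a bitmask and flushes them either immediately, when the camera is idle, or later via a delayed handler message. The following is a minimal standalone sketch of the same deferred-bitmask pattern, independent of the camera classes; all names here are hypothetical.

// Sketch of the deferred-update pattern used above (hypothetical names).
// Callers OR the subsets they want updated into a pending mask; the mask is
// flushed immediately when the worker is idle, and from onIdle() otherwise.
final class DeferredParameterUpdater {
    static final int UPDATE_ZOOM = 1;
    static final int UPDATE_PREFERENCE = 2;

    private int mPendingSet;
    private boolean mIdle = true;

    synchronized void requestUpdate(int updateSet) {
        mPendingSet |= updateSet;
        if (mIdle) {
            flush();
        }
        // If not idle, the pending mask is applied from onIdle(), which plays the
        // role of the delayed SET_CAMERA_PARAMETERS_WHEN_IDLE message above.
    }

    synchronized void onBusy() {
        mIdle = false;
    }

    synchronized void onIdle() {
        mIdle = true;
        if (mPendingSet != 0) {
            flush();
        }
    }

    private void flush() {
        int set = mPendingSet;
        mPendingSet = 0;
        if ((set & UPDATE_ZOOM) != 0) { /* apply the zoom subset */ }
        if ((set & UPDATE_PREFERENCE) != 0) { /* apply the preference subset */ }
    }
}
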
-
- public boolean isCameraIdle() {
- return (mCameraState == IDLE) ||
- (mCameraState == PREVIEW_STOPPED) ||
- ((mFocusManager != null) && mFocusManager.isFocusCompleted()
- && (mCameraState != SWITCHING_CAMERA));
- }
-
- public boolean isImageCaptureIntent() {
- String action = mActivity.getIntent().getAction();
- return (MediaStore.ACTION_IMAGE_CAPTURE.equals(action)
- || ActivityBase.ACTION_IMAGE_CAPTURE_SECURE.equals(action));
- }
-
- private void setupCaptureParams() {
- Bundle myExtras = mActivity.getIntent().getExtras();
- if (myExtras != null) {
- mSaveUri = (Uri) myExtras.getParcelable(MediaStore.EXTRA_OUTPUT);
- mCropValue = myExtras.getString("crop");
- }
- }
-
- @Override
- public void onSharedPreferenceChanged() {
-        // Ignore events that arrive after onPause().
- if (mPaused) return;
-
- boolean recordLocation = RecordLocationPreference.get(
- mPreferences, mContentResolver);
- mLocationManager.recordLocation(recordLocation);
-
- setCameraParametersWhenIdle(UPDATE_PARAM_PREFERENCE);
- mUI.updateOnScreenIndicators(mParameters, mPreferenceGroup, mPreferences);
- }
-
- @Override
- public void onCameraPickerClicked(int cameraId) {
- if (mPaused || mPendingSwitchCameraId != -1) return;
-
- mPendingSwitchCameraId = cameraId;
-
- Log.v(TAG, "Start to switch camera. cameraId=" + cameraId);
- // We need to keep a preview frame for the animation before
- // releasing the camera. This will trigger onPreviewTextureCopied.
- //TODO: Need to animate the camera switch
- switchCamera();
- }
-
-    // The preview texture has been copied. Now the camera can be released and the
- // animation can be started.
- @Override
- public void onPreviewTextureCopied() {
- mHandler.sendEmptyMessage(SWITCH_CAMERA);
- }
-
- @Override
- public void onCaptureTextureCopied() {
- }
-
- @Override
- public void onUserInteraction() {
- if (!mActivity.isFinishing()) keepScreenOnAwhile();
- }
-
- private void resetScreenOn() {
- mHandler.removeMessages(CLEAR_SCREEN_DELAY);
- mActivity.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
- }
-
- private void keepScreenOnAwhile() {
- mHandler.removeMessages(CLEAR_SCREEN_DELAY);
- mActivity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
- mHandler.sendEmptyMessageDelayed(CLEAR_SCREEN_DELAY, SCREEN_DELAY);
- }
-
- @Override
- public void onOverriddenPreferencesClicked() {
- if (mPaused) return;
- mUI.showPreferencesToast();
- }
-
- private void showTapToFocusToast() {
- // TODO: Use a toast?
- new RotateTextToast(mActivity, R.string.tap_to_focus, 0).show();
- // Clear the preference.
- Editor editor = mPreferences.edit();
- editor.putBoolean(CameraSettings.KEY_CAMERA_FIRST_USE_HINT_SHOWN, false);
- editor.apply();
- }
-
- private void initializeCapabilities() {
- mInitialParams = mCameraDevice.getParameters();
- mFocusAreaSupported = Util.isFocusAreaSupported(mInitialParams);
- mMeteringAreaSupported = Util.isMeteringAreaSupported(mInitialParams);
- mAeLockSupported = Util.isAutoExposureLockSupported(mInitialParams);
- mAwbLockSupported = Util.isAutoWhiteBalanceLockSupported(mInitialParams);
- mContinousFocusSupported = mInitialParams.getSupportedFocusModes().contains(
- Util.FOCUS_MODE_CONTINUOUS_PICTURE);
- }
-
- @Override
- public void onCountDownFinished() {
- mSnapshotOnIdle = false;
- mFocusManager.doSnap();
- mFocusManager.onShutterUp();
- }
-
- @Override
- public boolean needsSwitcher() {
- return !mIsImageCaptureIntent;
- }
-
- @Override
- public boolean needsPieMenu() {
- return true;
- }
-
- @Override
- public void onShowSwitcherPopup() {
- mUI.onShowSwitcherPopup();
- }
-
- @Override
- public int onZoomChanged(int index) {
- // Not useful to change zoom value when the activity is paused.
- if (mPaused) return index;
- mZoomValue = index;
- if (mParameters == null || mCameraDevice == null) return index;
- // Set zoom parameters asynchronously
- mParameters.setZoom(mZoomValue);
- mCameraDevice.setParameters(mParameters);
- Parameters p = mCameraDevice.getParameters();
- if (p != null) return p.getZoom();
- return index;
- }
-
- @Override
- public int getCameraState() {
- return mCameraState;
- }
-
- @Override
- public void onQueueStatus(boolean full) {
- mUI.enableShutter(!full);
- }
-
- @Override
- public void onMediaSaveServiceConnected(MediaSaveService s) {
-        // We set the listener only when both the service and the shutter button
- // are initialized.
- if (mFirstTimeInitialized) {
- s.setListener(this);
- }
- }
-
- @Override
- public void onAccuracyChanged(Sensor sensor, int accuracy) {
- }
-
- @Override
- public void onSensorChanged(SensorEvent event) {
- int type = event.sensor.getType();
- float[] data;
- if (type == Sensor.TYPE_ACCELEROMETER) {
- data = mGData;
- } else if (type == Sensor.TYPE_MAGNETIC_FIELD) {
- data = mMData;
- } else {
- // we should not be here.
- return;
- }
- for (int i = 0; i < 3 ; i++) {
- data[i] = event.values[i];
- }
- float[] orientation = new float[3];
- SensorManager.getRotationMatrix(mR, null, mGData, mMData);
- SensorManager.getOrientation(mR, orientation);
- mHeading = (int) (orientation[0] * 180f / Math.PI) % 360;
- if (mHeading < 0) {
- mHeading += 360;
- }
- }
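
The heading computation in onSensorChanged() above is the standard Android pattern of feeding the latest accelerometer and magnetometer samples into SensorManager.getRotationMatrix() and getOrientation(), then converting the azimuth to degrees in [0, 360). Below is a minimal self-contained sketch of the same technique; the class and field names are illustrative, not part of this module.

import android.hardware.Sensor;
import android.hardware.SensorEvent;
import android.hardware.SensorEventListener;
import android.hardware.SensorManager;

// Illustrative listener that derives a compass heading the same way as above.
class HeadingTracker implements SensorEventListener {
    private final float[] mGravity = new float[3];
    private final float[] mGeomagnetic = new float[3];
    private final float[] mRotation = new float[16];
    private int mHeading = -1;  // 0..359 once both sensors have reported

    @Override
    public void onSensorChanged(SensorEvent event) {
        float[] target;
        switch (event.sensor.getType()) {
            case Sensor.TYPE_ACCELEROMETER: target = mGravity; break;
            case Sensor.TYPE_MAGNETIC_FIELD: target = mGeomagnetic; break;
            default: return;
        }
        System.arraycopy(event.values, 0, target, 0, 3);

        if (SensorManager.getRotationMatrix(mRotation, null, mGravity, mGeomagnetic)) {
            float[] orientation = new float[3];
            SensorManager.getOrientation(mRotation, orientation);
            // orientation[0] is the azimuth in radians; normalize to [0, 360).
            mHeading = (int) Math.toDegrees(orientation[0]) % 360;
            if (mHeading < 0) mHeading += 360;
        }
    }

    @Override
    public void onAccuracyChanged(Sensor sensor, int accuracy) {}

    public int getHeading() { return mHeading; }
}
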
-/* The code below is no longer needed, except to avoid compile errors.
- * TODO: Remove these methods.
- */
-
- // TODO: Delete this function after old camera code is removed
- @Override
- public void onRestorePreferencesClicked() {}
-
- @Override
- public void onFullScreenChanged(boolean full) {
- /* //TODO:
- mUI.onFullScreenChanged(full);
- if (ApiHelper.HAS_SURFACE_TEXTURE) {
- if (mActivity.mCameraScreenNail != null) {
- ((CameraScreenNail) mActivity.mCameraScreenNail).setFullScreen(full);
- }
- return;
- } */
- }
-
-}
diff --git a/src/com/android/camera/NewPhotoUI.java b/src/com/android/camera/NewPhotoUI.java
deleted file mode 100644
index 2f29e440f..000000000
--- a/src/com/android/camera/NewPhotoUI.java
+++ /dev/null
@@ -1,787 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package com.android.camera;
-
-import android.graphics.Matrix;
-import android.graphics.SurfaceTexture;
-import android.hardware.Camera;
-import android.hardware.Camera.Face;
-import android.hardware.Camera.FaceDetectionListener;
-import android.hardware.Camera.Parameters;
-import android.hardware.Camera.Size;
-import android.os.Handler;
-import android.os.Message;
-import android.util.Log;
-import android.view.Gravity;
-import android.view.MotionEvent;
-import android.view.TextureView;
-import android.view.View;
-import android.view.View.OnClickListener;
-import android.view.View.OnLayoutChangeListener;
-import android.view.ViewGroup;
-import android.view.ViewStub;
-import android.widget.FrameLayout;
-import android.widget.FrameLayout.LayoutParams;
-import android.widget.ImageView;
-import android.widget.Toast;
-
-import com.android.camera.CameraPreference.OnPreferenceChangedListener;
-import com.android.camera.FocusOverlayManager.FocusUI;
-import com.android.camera.ui.AbstractSettingPopup;
-import com.android.camera.ui.CameraSwitcher.CameraSwitchListener;
-import com.android.camera.ui.CountDownView;
-import com.android.camera.ui.CountDownView.OnCountDownFinishedListener;
-import com.android.camera.ui.CameraSwitcher;
-import com.android.camera.ui.FaceView;
-import com.android.camera.ui.FocusIndicator;
-import com.android.camera.ui.PieRenderer;
-import com.android.camera.ui.PieRenderer.PieListener;
-import com.android.camera.ui.RenderOverlay;
-import com.android.camera.ui.ZoomRenderer;
-import com.android.gallery3d.R;
-import com.android.gallery3d.common.ApiHelper;
-
-import java.io.IOException;
-import java.util.List;
-
-public class NewPhotoUI implements PieListener,
- NewPreviewGestures.SingleTapListener,
- FocusUI, TextureView.SurfaceTextureListener,
- LocationManager.Listener,
- FaceDetectionListener {
-
- private static final String TAG = "CAM_UI";
- private static final int UPDATE_TRANSFORM_MATRIX = 1;
- private NewCameraActivity mActivity;
- private PhotoController mController;
- private NewPreviewGestures mGestures;
-
- private View mRootView;
- private Object mSurfaceTexture;
-
- private AbstractSettingPopup mPopup;
- private ShutterButton mShutterButton;
- private CountDownView mCountDownView;
-
- private FaceView mFaceView;
- private RenderOverlay mRenderOverlay;
- private View mReviewCancelButton;
- private View mReviewDoneButton;
- private View mReviewRetakeButton;
-
- private View mMenuButton;
- private View mBlocker;
- private NewPhotoMenu mMenu;
- private CameraSwitcher mSwitcher;
- private View mCameraControls;
-
- // Small indicators which show the camera settings in the viewfinder.
- private OnScreenIndicators mOnScreenIndicators;
-
- private PieRenderer mPieRenderer;
- private ZoomRenderer mZoomRenderer;
- private Toast mNotSelectableToast;
-
- private int mZoomMax;
- private List<Integer> mZoomRatios;
-
- private int mPreviewWidth = 0;
- private int mPreviewHeight = 0;
- private float mSurfaceTextureUncroppedWidth;
- private float mSurfaceTextureUncroppedHeight;
-
- private View mPreviewThumb;
-
- private SurfaceTextureSizeChangedListener mSurfaceTextureSizeListener;
- private TextureView mTextureView;
- private Matrix mMatrix = null;
- private float mAspectRatio = 4f / 3f;
- private final Object mLock = new Object();
- private final Handler mHandler = new Handler() {
- @Override
- public void handleMessage(Message msg) {
- switch (msg.what) {
- case UPDATE_TRANSFORM_MATRIX:
- setTransformMatrix(mPreviewWidth, mPreviewHeight);
- break;
- default:
- break;
- }
- }
- };
-
- public interface SurfaceTextureSizeChangedListener {
- public void onSurfaceTextureSizeChanged(int uncroppedWidth, int uncroppedHeight);
- }
-
- private OnLayoutChangeListener mLayoutListener = new OnLayoutChangeListener() {
- @Override
- public void onLayoutChange(View v, int left, int top, int right,
- int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) {
- int width = right - left;
- int height = bottom - top;
- // Full-screen screennail
- int w = width;
- int h = height;
- if (Util.getDisplayRotation(mActivity) % 180 != 0) {
- w = height;
- h = width;
- }
- if (mPreviewWidth != width || mPreviewHeight != height) {
- mPreviewWidth = width;
- mPreviewHeight = height;
- onScreenSizeChanged(width, height, w, h);
- mController.onScreenSizeChanged(width, height, w, h);
- }
- }
- };
-
- public NewPhotoUI(NewCameraActivity activity, PhotoController controller, View parent) {
- mActivity = activity;
- mController = controller;
- mRootView = parent;
-
- mActivity.getLayoutInflater().inflate(R.layout.new_photo_module,
- (ViewGroup) mRootView, true);
- mRenderOverlay = (RenderOverlay) mRootView.findViewById(R.id.render_overlay);
-        // Set up the preview TextureView.
- mTextureView = (TextureView) mRootView.findViewById(R.id.preview_content);
- mTextureView.setSurfaceTextureListener(this);
- mTextureView.addOnLayoutChangeListener(mLayoutListener);
- initIndicators();
-
- mShutterButton = (ShutterButton) mRootView.findViewById(R.id.shutter_button);
- mSwitcher = (CameraSwitcher) mRootView.findViewById(R.id.camera_switcher);
- mSwitcher.setCurrentIndex(0);
- mSwitcher.setSwitchListener((CameraSwitchListener) mActivity);
- mMenuButton = mRootView.findViewById(R.id.menu);
- if (ApiHelper.HAS_FACE_DETECTION) {
- ViewStub faceViewStub = (ViewStub) mRootView
- .findViewById(R.id.face_view_stub);
- if (faceViewStub != null) {
- faceViewStub.inflate();
- mFaceView = (FaceView) mRootView.findViewById(R.id.face_view);
- setSurfaceTextureSizeChangedListener(
- (SurfaceTextureSizeChangedListener) mFaceView);
- }
- }
- mCameraControls = mRootView.findViewById(R.id.camera_controls);
- }
-
- public void onScreenSizeChanged(int width, int height, int previewWidth, int previewHeight) {
- setTransformMatrix(width, height);
- }
-
- public void setSurfaceTextureSizeChangedListener(SurfaceTextureSizeChangedListener listener) {
- mSurfaceTextureSizeListener = listener;
- }
-
- public void setPreviewSize(Size size) {
- int width = size.width;
- int height = size.height;
- if (width == 0 || height == 0) {
- Log.w(TAG, "Preview size should not be 0.");
- return;
- }
- if (width > height) {
- mAspectRatio = (float) width / height;
- } else {
- mAspectRatio = (float) height / width;
- }
- mHandler.sendEmptyMessage(UPDATE_TRANSFORM_MATRIX);
- }
-
- private void setTransformMatrix(int width, int height) {
- mMatrix = mTextureView.getTransform(mMatrix);
- int orientation = Util.getDisplayRotation(mActivity);
- float scaleX = 1f, scaleY = 1f;
- float scaledTextureWidth, scaledTextureHeight;
- if (width > height) {
- scaledTextureWidth = Math.max(width,
- (int) (height * mAspectRatio));
- scaledTextureHeight = Math.max(height,
- (int)(width / mAspectRatio));
- } else {
- scaledTextureWidth = Math.max(width,
- (int) (height / mAspectRatio));
- scaledTextureHeight = Math.max(height,
- (int) (width * mAspectRatio));
- }
-
- if (mSurfaceTextureUncroppedWidth != scaledTextureWidth ||
- mSurfaceTextureUncroppedHeight != scaledTextureHeight) {
- mSurfaceTextureUncroppedWidth = scaledTextureWidth;
- mSurfaceTextureUncroppedHeight = scaledTextureHeight;
- if (mSurfaceTextureSizeListener != null) {
- mSurfaceTextureSizeListener.onSurfaceTextureSizeChanged(
- (int) mSurfaceTextureUncroppedWidth, (int) mSurfaceTextureUncroppedHeight);
- }
- }
- scaleX = scaledTextureWidth / width;
- scaleY = scaledTextureHeight / height;
- mMatrix.setScale(scaleX, scaleY, (float) width / 2, (float) height / 2);
- mTextureView.setTransform(mMatrix);
- }
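
setTransformMatrix() above scales the TextureView content about its center so the preview keeps the camera aspect ratio while covering the whole view, cropping the overflow. The helper below is a minimal sketch of the same scaling rule; the class and method names are illustrative.

import android.graphics.Matrix;

// Illustrative helper mirroring the center-crop scaling rule used above.
final class PreviewTransform {
    static Matrix centerCrop(int viewWidth, int viewHeight, float aspectRatio) {
        float scaledWidth, scaledHeight;
        if (viewWidth > viewHeight) {          // landscape-shaped view
            scaledWidth = Math.max(viewWidth, viewHeight * aspectRatio);
            scaledHeight = Math.max(viewHeight, viewWidth / aspectRatio);
        } else {                               // portrait-shaped view
            scaledWidth = Math.max(viewWidth, viewHeight / aspectRatio);
            scaledHeight = Math.max(viewHeight, viewWidth * aspectRatio);
        }
        Matrix matrix = new Matrix();
        // Scale the texture up about the view center; the dimension that grows
        // beyond the view bounds is cropped by the view.
        matrix.setScale(scaledWidth / viewWidth, scaledHeight / viewHeight,
                viewWidth / 2f, viewHeight / 2f);
        return matrix;
    }
}
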
-
- public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
- synchronized (mLock) {
- mSurfaceTexture = surface;
- mLock.notifyAll();
- }
- }
-
- public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
- // Ignored, Camera does all the work for us
- }
-
- public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
- mSurfaceTexture = null;
- mController.stopPreview();
- Log.w(TAG, "surfaceTexture is destroyed");
- return true;
- }
-
- public void onSurfaceTextureUpdated(SurfaceTexture surface) {
- // Invoked every time there's a new Camera preview frame
- }
-
- public View getRootView() {
- return mRootView;
- }
-
- private void initIndicators() {
- mOnScreenIndicators = new OnScreenIndicators(mActivity,
- mRootView.findViewById(R.id.on_screen_indicators));
- }
-
- public void onCameraOpened(PreferenceGroup prefGroup, ComboPreferences prefs,
- Camera.Parameters params, OnPreferenceChangedListener listener) {
- if (mPieRenderer == null) {
- mPieRenderer = new PieRenderer(mActivity);
- mPieRenderer.setPieListener(this);
- mRenderOverlay.addRenderer(mPieRenderer);
- }
-
- if (mMenu == null) {
- mMenu = new NewPhotoMenu(mActivity, this, mPieRenderer);
- mMenu.setListener(listener);
- }
- mMenu.initialize(prefGroup);
-
- if (mZoomRenderer == null) {
- mZoomRenderer = new ZoomRenderer(mActivity);
- mRenderOverlay.addRenderer(mZoomRenderer);
- }
-
- if (mGestures == null) {
- // this will handle gesture disambiguation and dispatching
- mGestures = new NewPreviewGestures(mActivity, this, mZoomRenderer, mPieRenderer);
- mRenderOverlay.setGestures(mGestures);
- }
- mGestures.setZoomEnabled(params.isZoomSupported());
- mGestures.setRenderOverlay(mRenderOverlay);
- mRenderOverlay.requestLayout();
-
- initializeZoom(params);
- updateOnScreenIndicators(params, prefGroup, prefs);
- }
-
- private void openMenu() {
- if (mPieRenderer != null) {
- // If autofocus is not finished, cancel autofocus so that the
- // subsequent touch can be handled by PreviewGestures
- if (mController.getCameraState() == PhotoController.FOCUSING) {
- mController.cancelAutoFocus();
- }
- mPieRenderer.showInCenter();
- }
- }
-
- public void initializeControlByIntent() {
- mBlocker = mRootView.findViewById(R.id.blocker);
- mPreviewThumb = mActivity.findViewById(R.id.preview_thumb);
- mPreviewThumb.setOnClickListener(new OnClickListener() {
- @Override
- public void onClick(View v) {
- // TODO: go to filmstrip
- // mActivity.gotoGallery();
- }
- });
- mMenuButton = mRootView.findViewById(R.id.menu);
- mMenuButton.setOnClickListener(new OnClickListener() {
- @Override
- public void onClick(View v) {
- openMenu();
- }
- });
- if (mController.isImageCaptureIntent()) {
- hideSwitcher();
- ViewGroup cameraControls = (ViewGroup) mRootView.findViewById(R.id.camera_controls);
- mActivity.getLayoutInflater().inflate(R.layout.review_module_control, cameraControls);
-
- mReviewDoneButton = mRootView.findViewById(R.id.btn_done);
- mReviewCancelButton = mRootView.findViewById(R.id.btn_cancel);
- mReviewRetakeButton = mRootView.findViewById(R.id.btn_retake);
- mReviewCancelButton.setVisibility(View.VISIBLE);
-
- mReviewDoneButton.setOnClickListener(new OnClickListener() {
- @Override
- public void onClick(View v) {
- mController.onCaptureDone();
- }
- });
- mReviewCancelButton.setOnClickListener(new OnClickListener() {
- @Override
- public void onClick(View v) {
- mController.onCaptureCancelled();
- }
- });
-
- mReviewRetakeButton.setOnClickListener(new OnClickListener() {
- @Override
- public void onClick(View v) {
- mController.onCaptureRetake();
- }
- });
- }
- }
-
- public void hideUI() {
- mCameraControls.setVisibility(View.INVISIBLE);
- hideSwitcher();
- mShutterButton.setVisibility(View.GONE);
- }
-
- public void showUI() {
- mCameraControls.setVisibility(View.VISIBLE);
- showSwitcher();
- mShutterButton.setVisibility(View.VISIBLE);
- }
-
- public void hideSwitcher() {
- mSwitcher.closePopup();
- mSwitcher.setVisibility(View.INVISIBLE);
- }
-
- public void showSwitcher() {
- mSwitcher.setVisibility(View.VISIBLE);
- }
-
-    // Called from onResume, but only the first time.
- public void initializeFirstTime() {
- // Initialize shutter button.
- mShutterButton.setImageResource(R.drawable.btn_new_shutter);
- mShutterButton.setOnShutterButtonListener(mController);
- mShutterButton.setVisibility(View.VISIBLE);
- }
-
-    // Called from onResume on every resume after the first.
- public void initializeSecondTime(Camera.Parameters params) {
- initializeZoom(params);
- if (mController.isImageCaptureIntent()) {
- hidePostCaptureAlert();
- }
- if (mMenu != null) {
- mMenu.reloadPreferences();
- }
- }
-
- public void initializeZoom(Camera.Parameters params) {
- if ((params == null) || !params.isZoomSupported()
- || (mZoomRenderer == null)) return;
- mZoomMax = params.getMaxZoom();
- mZoomRatios = params.getZoomRatios();
-        // Currently we use immediate zoom for fast zooming and a better UX;
-        // there is no plan to take advantage of smooth zoom.
- if (mZoomRenderer != null) {
- mZoomRenderer.setZoomMax(mZoomMax);
- mZoomRenderer.setZoom(params.getZoom());
- mZoomRenderer.setZoomValue(mZoomRatios.get(params.getZoom()));
- mZoomRenderer.setOnZoomChangeListener(new ZoomChangeListener());
- }
- }
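
initializeZoom() above wires the zoom renderer to Camera.Parameters, whose getZoomRatios() list reports ratios in hundredths (100 means 1.0x), indexed by the zoom index. The small helper below is illustrative only and shows how such an index can be mapped to a user-visible label; the class and method names are hypothetical.

import android.hardware.Camera;
import java.util.List;
import java.util.Locale;

// Illustrative only: turn a zoom index into a "1.5x"-style label.
final class ZoomLabels {
    static String labelFor(Camera.Parameters params, int zoomIndex) {
        List<Integer> ratios = params.getZoomRatios();  // hundredths, e.g. 100 == 1.0x
        float ratio = ratios.get(zoomIndex) / 100f;
        return String.format(Locale.US, "%.1fx", ratio);
    }
}
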
-
- public void showGpsOnScreenIndicator(boolean hasSignal) { }
-
- public void hideGpsOnScreenIndicator() { }
-
- public void overrideSettings(final String ... keyvalues) {
- mMenu.overrideSettings(keyvalues);
- }
-
- public void updateOnScreenIndicators(Camera.Parameters params,
- PreferenceGroup group, ComboPreferences prefs) {
- if (params == null) return;
- mOnScreenIndicators.updateSceneOnScreenIndicator(params.getSceneMode());
- mOnScreenIndicators.updateExposureOnScreenIndicator(params,
- CameraSettings.readExposure(prefs));
- mOnScreenIndicators.updateFlashOnScreenIndicator(params.getFlashMode());
- int wbIndex = 2;
- ListPreference pref = group.findPreference(CameraSettings.KEY_WHITE_BALANCE);
- if (pref != null) {
- wbIndex = pref.getCurrentIndex();
- }
- mOnScreenIndicators.updateWBIndicator(wbIndex);
- boolean location = RecordLocationPreference.get(
- prefs, mActivity.getContentResolver());
- mOnScreenIndicators.updateLocationIndicator(location);
- }
-
- public void setCameraState(int state) {
- }
-
- public void enableGestures(boolean enable) {
- if (mGestures != null) {
- mGestures.setEnabled(enable);
- }
- }
-
- // forward from preview gestures to controller
- @Override
- public void onSingleTapUp(View view, int x, int y) {
- mController.onSingleTapUp(view, x, y);
- }
-
- public boolean onBackPressed() {
- if (mPieRenderer != null && mPieRenderer.showsItems()) {
- mPieRenderer.hide();
- return true;
- }
-        // In image capture mode, the back button should:
-        // 1) dismiss any popup that is showing, or 2) otherwise get out of
-        // image capture.
- if (mController.isImageCaptureIntent()) {
- if (!removeTopLevelPopup()) {
- // no popup to dismiss, cancel image capture
- mController.onCaptureCancelled();
- }
- return true;
- } else if (!mController.isCameraIdle()) {
- // ignore backs while we're taking a picture
- return true;
- } else {
- return removeTopLevelPopup();
- }
- }
-
- public void onFullScreenChanged(boolean full) {
- if (mFaceView != null) {
- mFaceView.setBlockDraw(!full);
- }
- if (mPopup != null) {
- dismissPopup(full);
- }
- if (mGestures != null) {
- mGestures.setEnabled(full);
- }
- if (mRenderOverlay != null) {
-            // This cannot happen in capture mode.
- mRenderOverlay.setVisibility(full ? View.VISIBLE : View.GONE);
- }
- if (mPieRenderer != null) {
- mPieRenderer.setBlockFocus(!full);
- }
- setShowMenu(full);
- if (mBlocker != null) {
- mBlocker.setVisibility(full ? View.VISIBLE : View.GONE);
- }
- if (!full && mCountDownView != null) mCountDownView.cancelCountDown();
- }
-
- public void enablePreviewThumb(boolean enabled) {
- if (enabled) {
- mPreviewThumb.setVisibility(View.VISIBLE);
- } else {
- mPreviewThumb.setVisibility(View.GONE);
- }
- }
-
- public boolean removeTopLevelPopup() {
-        // Remove the top-level popup or dialog box, and return true if one was removed.
- if (mPopup != null) {
- dismissPopup();
- return true;
- }
- return false;
- }
-
- public void showPopup(AbstractSettingPopup popup) {
- hideUI();
- mBlocker.setVisibility(View.INVISIBLE);
- setShowMenu(false);
- mPopup = popup;
- mPopup.setVisibility(View.VISIBLE);
- FrameLayout.LayoutParams lp = new FrameLayout.LayoutParams(LayoutParams.WRAP_CONTENT,
- LayoutParams.WRAP_CONTENT);
- lp.gravity = Gravity.CENTER;
- ((FrameLayout) mRootView).addView(mPopup, lp);
- }
-
- public void dismissPopup() {
- dismissPopup(true);
- }
-
- private void dismissPopup(boolean fullScreen) {
- if (fullScreen) {
- showUI();
- mBlocker.setVisibility(View.VISIBLE);
- }
- setShowMenu(fullScreen);
- if (mPopup != null) {
- ((FrameLayout) mRootView).removeView(mPopup);
- mPopup = null;
- }
- mMenu.popupDismissed();
- }
-
- public void onShowSwitcherPopup() {
- if (mPieRenderer != null && mPieRenderer.showsItems()) {
- mPieRenderer.hide();
- }
- }
-
- private void setShowMenu(boolean show) {
- if (mOnScreenIndicators != null) {
- mOnScreenIndicators.setVisibility(show ? View.VISIBLE : View.GONE);
- }
- if (mMenuButton != null) {
- mMenuButton.setVisibility(show ? View.VISIBLE : View.GONE);
- }
- }
-
- public boolean collapseCameraControls() {
- // Remove all the popups/dialog boxes
- boolean ret = false;
- if (mPopup != null) {
- dismissPopup();
- ret = true;
- }
- onShowSwitcherPopup();
- return ret;
- }
-
- protected void showPostCaptureAlert() {
- mOnScreenIndicators.setVisibility(View.GONE);
- mMenuButton.setVisibility(View.GONE);
- Util.fadeIn(mReviewDoneButton);
- mShutterButton.setVisibility(View.INVISIBLE);
- Util.fadeIn(mReviewRetakeButton);
- pauseFaceDetection();
- }
-
- protected void hidePostCaptureAlert() {
- mOnScreenIndicators.setVisibility(View.VISIBLE);
- mMenuButton.setVisibility(View.VISIBLE);
- Util.fadeOut(mReviewDoneButton);
- mShutterButton.setVisibility(View.VISIBLE);
- Util.fadeOut(mReviewRetakeButton);
- resumeFaceDetection();
- }
-
- public void setDisplayOrientation(int orientation) {
- if (mFaceView != null) {
- mFaceView.setDisplayOrientation(orientation);
- }
- }
-
- // shutter button handling
-
- public boolean isShutterPressed() {
- return mShutterButton.isPressed();
- }
-
- public void enableShutter(boolean enabled) {
- if (mShutterButton != null) {
- mShutterButton.setEnabled(enabled);
- }
- }
-
- public void pressShutterButton() {
- if (mShutterButton.isInTouchMode()) {
- mShutterButton.requestFocusFromTouch();
- } else {
- mShutterButton.requestFocus();
- }
- mShutterButton.setPressed(true);
- }
-
- private class ZoomChangeListener implements ZoomRenderer.OnZoomChangedListener {
- @Override
- public void onZoomValueChanged(int index) {
- int newZoom = mController.onZoomChanged(index);
- if (mZoomRenderer != null) {
- mZoomRenderer.setZoomValue(mZoomRatios.get(newZoom));
- }
- }
-
- @Override
- public void onZoomStart() {
- if (mPieRenderer != null) {
- mPieRenderer.setBlockFocus(true);
- }
- }
-
- @Override
- public void onZoomEnd() {
- if (mPieRenderer != null) {
- mPieRenderer.setBlockFocus(false);
- }
- }
- }
-
- @Override
- public void onPieOpened(int centerX, int centerY) {
- setSwipingEnabled(false);
- dismissPopup();
- if (mFaceView != null) {
- mFaceView.setBlockDraw(true);
- }
- }
-
- @Override
- public void onPieClosed() {
- setSwipingEnabled(true);
- if (mFaceView != null) {
- mFaceView.setBlockDraw(false);
- }
- }
-
- public void setSwipingEnabled(boolean enable) {
- mActivity.setSwipingEnabled(enable);
- }
-
- public Object getSurfaceTexture() {
- synchronized (mLock) {
- if (mSurfaceTexture == null) {
- try {
- mLock.wait();
- } catch (InterruptedException e) {
- Log.w(TAG, "Unexpected interruption when waiting to get surface texture");
- }
- }
- }
- return mSurfaceTexture;
- }
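
getSurfaceTexture() above blocks the camera start-up path on mLock until onSurfaceTextureAvailable() publishes the texture and calls notifyAll(). Below is a minimal sketch of that producer/consumer handshake with illustrative names, using a while-loop guard, which is the more defensive form of the same idiom.

// Illustrative holder showing the publish/await handshake used above.
final class SurfaceTextureHolder {
    private final Object mLock = new Object();
    private Object mSurfaceTexture;  // kept as Object, as in the UI class above

    // Called from the TextureView callback (producer side).
    void publish(Object surfaceTexture) {
        synchronized (mLock) {
            mSurfaceTexture = surfaceTexture;
            mLock.notifyAll();
        }
    }

    // Called from the camera start-up path (consumer side); blocks until available.
    Object await() {
        synchronized (mLock) {
            while (mSurfaceTexture == null) {
                try {
                    mLock.wait();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    break;
                }
            }
            return mSurfaceTexture;
        }
    }
}
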
-
- // Countdown timer
-
- private void initializeCountDown() {
- mActivity.getLayoutInflater().inflate(R.layout.count_down_to_capture,
- (ViewGroup) mRootView, true);
- mCountDownView = (CountDownView) (mRootView.findViewById(R.id.count_down_to_capture));
- mCountDownView.setCountDownFinishedListener((OnCountDownFinishedListener) mController);
- }
-
- public boolean isCountingDown() {
- return mCountDownView != null && mCountDownView.isCountingDown();
- }
-
- public void cancelCountDown() {
- if (mCountDownView == null) return;
- mCountDownView.cancelCountDown();
- }
-
- public void startCountDown(int sec, boolean playSound) {
- if (mCountDownView == null) initializeCountDown();
- mCountDownView.startCountDown(sec, playSound);
- }
-
- public void showPreferencesToast() {
- if (mNotSelectableToast == null) {
- String str = mActivity.getResources().getString(R.string.not_selectable_in_scene_mode);
- mNotSelectableToast = Toast.makeText(mActivity, str, Toast.LENGTH_SHORT);
- }
- mNotSelectableToast.show();
- }
-
- public void onPause() {
- cancelCountDown();
-
- // Clear UI.
- collapseCameraControls();
- if (mFaceView != null) mFaceView.clear();
-
- mPreviewWidth = 0;
- mPreviewHeight = 0;
- }
-
- // focus UI implementation
-
- private FocusIndicator getFocusIndicator() {
- return (mFaceView != null && mFaceView.faceExists()) ? mFaceView : mPieRenderer;
- }
-
- @Override
- public boolean hasFaces() {
- return (mFaceView != null && mFaceView.faceExists());
- }
-
- public void clearFaces() {
- if (mFaceView != null) mFaceView.clear();
- }
-
- @Override
- public void clearFocus() {
- FocusIndicator indicator = getFocusIndicator();
- if (indicator != null) indicator.clear();
- }
-
- @Override
- public void setFocusPosition(int x, int y) {
- mPieRenderer.setFocus(x, y);
- }
-
- @Override
- public void onFocusStarted() {
- getFocusIndicator().showStart();
- }
-
- @Override
- public void onFocusSucceeded(boolean timeout) {
- getFocusIndicator().showSuccess(timeout);
- }
-
- @Override
- public void onFocusFailed(boolean timeout) {
- getFocusIndicator().showFail(timeout);
- }
-
- @Override
- public void pauseFaceDetection() {
- if (mFaceView != null) mFaceView.pause();
- }
-
- @Override
- public void resumeFaceDetection() {
- if (mFaceView != null) mFaceView.resume();
- }
-
- public void onStartFaceDetection(int orientation, boolean mirror) {
- mFaceView.clear();
- mFaceView.setVisibility(View.VISIBLE);
- mFaceView.setDisplayOrientation(orientation);
- mFaceView.setMirror(mirror);
- mFaceView.resume();
- }
-
- @Override
- public void onFaceDetection(Face[] faces, android.hardware.Camera camera) {
- mFaceView.setFaces(faces);
- }
-
-}
diff --git a/src/com/android/camera/NewPreviewGestures.java b/src/com/android/camera/NewPreviewGestures.java
deleted file mode 100644
index 339c4b33f..000000000
--- a/src/com/android/camera/NewPreviewGestures.java
+++ /dev/null
@@ -1,263 +0,0 @@
-package com.android.camera;
-
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import android.os.Handler;
-import android.os.Message;
-import android.util.Log;
-import android.view.GestureDetector;
-import android.view.MotionEvent;
-import android.view.ScaleGestureDetector;
-import android.view.View;
-import android.view.ViewConfiguration;
-
-import com.android.camera.PreviewGestures.SingleTapListener;
-import com.android.camera.PreviewGestures.SwipeListener;
-import com.android.camera.ui.PieRenderer;
-import com.android.camera.ui.RenderOverlay;
-import com.android.camera.ui.ZoomRenderer;
-import com.android.gallery3d.R;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/* NewPreviewGestures disambiguates touch events received on the RenderOverlay
- * and dispatches them to the proper recipient (i.e. the zoom renderer or the pie renderer).
- * Touch events on CameraControls are handled by the framework.
- */
-public class NewPreviewGestures
- implements ScaleGestureDetector.OnScaleGestureListener {
-
- private static final String TAG = "CAM_gestures";
-
- private static final long TIMEOUT_PIE = 200;
- private static final int MSG_PIE = 1;
- private static final int MODE_NONE = 0;
- private static final int MODE_PIE = 1;
- private static final int MODE_ZOOM = 2;
- private static final int MODE_MODULE = 3;
- private static final int MODE_ALL = 4;
- private static final int MODE_SWIPE = 5;
-
- public static final int DIR_UP = 0;
- public static final int DIR_DOWN = 1;
- public static final int DIR_LEFT = 2;
- public static final int DIR_RIGHT = 3;
-
- private NewCameraActivity mActivity;
- private SingleTapListener mTapListener;
- private RenderOverlay mOverlay;
- private PieRenderer mPie;
- private ZoomRenderer mZoom;
- private MotionEvent mDown;
- private MotionEvent mCurrent;
- private ScaleGestureDetector mScale;
- private int mMode;
- private int mSlop;
- private int mTapTimeout;
- private boolean mZoomEnabled;
- private boolean mEnabled;
- private boolean mZoomOnly;
- private int mOrientation;
- private GestureDetector mGestureDetector;
-
- private GestureDetector.SimpleOnGestureListener mGestureListener = new GestureDetector.SimpleOnGestureListener() {
- @Override
- public void onLongPress (MotionEvent e) {
- // Open pie
- if (mPie != null && !mPie.showsItems()) {
- openPie();
- }
- }
-
- @Override
- public boolean onSingleTapUp (MotionEvent e) {
- // Tap to focus when pie is not open
- if (mPie == null || !mPie.showsItems()) {
- mTapListener.onSingleTapUp(null, (int) e.getX(), (int) e.getY());
- return true;
- }
- return false;
- }
-
- @Override
- public boolean onScroll (MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
- if (mMode == MODE_ZOOM) return false;
- int deltaX = (int) (e1.getX() - e2.getX());
- int deltaY = (int) (e1.getY() - e2.getY());
- if (deltaY > 2 * deltaX && deltaY > -2 * deltaX) {
- // Open pie on swipe up
- if (mPie != null && !mPie.showsItems()) {
- openPie();
- return true;
- }
- }
- return false;
- }
- };
-
- private Handler mHandler = new Handler() {
- public void handleMessage(Message msg) {
- if (msg.what == MSG_PIE) {
- mMode = MODE_PIE;
- openPie();
- }
- }
- };
-
- public interface SingleTapListener {
- public void onSingleTapUp(View v, int x, int y);
- }
-
- public NewPreviewGestures(NewCameraActivity ctx, SingleTapListener tapListener,
- ZoomRenderer zoom, PieRenderer pie) {
- mActivity = ctx;
- mTapListener = tapListener;
- mPie = pie;
- mZoom = zoom;
- mMode = MODE_ALL;
- mScale = new ScaleGestureDetector(ctx, this);
- mSlop = (int) ctx.getResources().getDimension(R.dimen.pie_touch_slop);
- mTapTimeout = ViewConfiguration.getTapTimeout();
- mEnabled = true;
- mGestureDetector = new GestureDetector(mGestureListener);
- }
-
- public void setRenderOverlay(RenderOverlay overlay) {
- mOverlay = overlay;
- }
-
- public void setOrientation(int orientation) {
- mOrientation = orientation;
- }
-
- public void setEnabled(boolean enabled) {
- mEnabled = enabled;
- }
-
- public void setZoomEnabled(boolean enable) {
- mZoomEnabled = enable;
- }
-
- public void setZoomOnly(boolean zoom) {
- mZoomOnly = zoom;
- }
-
- public boolean isEnabled() {
- return mEnabled;
- }
-
- public boolean dispatchTouch(MotionEvent m) {
- if (!mEnabled) {
- return false;
- }
- mCurrent = m;
- if (MotionEvent.ACTION_DOWN == m.getActionMasked()) {
- mMode = MODE_NONE;
- mDown = MotionEvent.obtain(m);
- }
-
-        // If the pie is open, redirect all touch events to it.
- if (mPie != null && mPie.isOpen()) {
- return sendToPie(m);
- }
-
-        // If the pie is not open, send touch events to the gesture detector and
-        // the scale listener to recognize the gesture.
- mGestureDetector.onTouchEvent(m);
- if (mZoom != null) {
- mScale.onTouchEvent(m);
- if (MotionEvent.ACTION_POINTER_DOWN == m.getActionMasked()) {
- mMode = MODE_ZOOM;
- if (mZoomEnabled) {
- // Start showing zoom UI as soon as there is a second finger down
- mZoom.onScaleBegin(mScale);
- }
- } else if (MotionEvent.ACTION_POINTER_UP == m.getActionMasked()) {
- mZoom.onScaleEnd(mScale);
- }
- }
- return true;
- }
-
-    // DIR_LEFT means the finger moved from right to left.
- private int getSwipeDirection(MotionEvent m) {
- float dx = 0;
- float dy = 0;
- switch (mOrientation) {
- case 0:
- dx = m.getX() - mDown.getX();
- dy = m.getY() - mDown.getY();
- break;
- case 90:
- dx = - (m.getY() - mDown.getY());
- dy = m.getX() - mDown.getX();
- break;
- case 180:
- dx = -(m.getX() - mDown.getX());
- dy = m.getY() - mDown.getY();
- break;
- case 270:
- dx = m.getY() - mDown.getY();
- dy = m.getX() - mDown.getX();
- break;
- }
- if (dx < 0 && (Math.abs(dy) / -dx < 2)) return DIR_LEFT;
- if (dx > 0 && (Math.abs(dy) / dx < 2)) return DIR_RIGHT;
- if (dy > 0) return DIR_DOWN;
- return DIR_UP;
- }
-
- private MotionEvent makeCancelEvent(MotionEvent m) {
- MotionEvent c = MotionEvent.obtain(m);
- c.setAction(MotionEvent.ACTION_CANCEL);
- return c;
- }
-
- private void openPie() {
- mGestureDetector.onTouchEvent(makeCancelEvent(mDown));
- mScale.onTouchEvent(makeCancelEvent(mDown));
- mOverlay.directDispatchTouch(mDown, mPie);
- }
-
- private boolean sendToPie(MotionEvent m) {
- return mOverlay.directDispatchTouch(m, mPie);
- }
-
- // OnScaleGestureListener implementation
- @Override
- public boolean onScale(ScaleGestureDetector detector) {
- return mZoom.onScale(detector);
- }
-
- @Override
- public boolean onScaleBegin(ScaleGestureDetector detector) {
- if (mPie == null || !mPie.isOpen()) {
- mMode = MODE_ZOOM;
- mGestureDetector.onTouchEvent(makeCancelEvent(mCurrent));
- if (!mZoomEnabled) return false;
- return mZoom.onScaleBegin(detector);
- }
- return false;
- }
-
- @Override
- public void onScaleEnd(ScaleGestureDetector detector) {
- mZoom.onScaleEnd(detector);
- }
-}
-
diff --git a/src/com/android/camera/NewVideoMenu.java b/src/com/android/camera/NewVideoMenu.java
deleted file mode 100644
index 70f73ec39..000000000
--- a/src/com/android/camera/NewVideoMenu.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.camera;
-
-import android.app.Activity;
-import android.content.Context;
-import android.view.LayoutInflater;
-
-import com.android.camera.ui.AbstractSettingPopup;
-import com.android.camera.ui.ListPrefSettingPopup;
-import com.android.camera.ui.MoreSettingPopup;
-import com.android.camera.ui.PieItem;
-import com.android.camera.ui.PieItem.OnClickListener;
-import com.android.camera.ui.PieRenderer;
-import com.android.camera.ui.TimeIntervalPopup;
-import com.android.gallery3d.R;
-
-public class NewVideoMenu extends PieController
- implements MoreSettingPopup.Listener,
- ListPrefSettingPopup.Listener,
- TimeIntervalPopup.Listener {
-
- private static String TAG = "CAM_VideoMenu";
-
- private NewVideoUI mUI;
- private String[] mOtherKeys;
- private AbstractSettingPopup mPopup;
-
- private static final int POPUP_NONE = 0;
- private static final int POPUP_FIRST_LEVEL = 1;
- private static final int POPUP_SECOND_LEVEL = 2;
- private int mPopupStatus;
- private NewCameraActivity mActivity;
-
- public NewVideoMenu(NewCameraActivity activity, NewVideoUI ui, PieRenderer pie) {
- super(activity, pie);
- mUI = ui;
- mActivity = activity;
- }
-
-
- public void initialize(PreferenceGroup group) {
- super.initialize(group);
- mPopup = null;
- mPopupStatus = POPUP_NONE;
- PieItem item = null;
- // white balance
- if (group.findPreference(CameraSettings.KEY_WHITE_BALANCE) != null) {
- item = makeItem(CameraSettings.KEY_WHITE_BALANCE);
- mRenderer.addItem(item);
- }
- // settings popup
- mOtherKeys = new String[] {
- CameraSettings.KEY_VIDEO_EFFECT,
- CameraSettings.KEY_VIDEO_TIME_LAPSE_FRAME_INTERVAL,
- CameraSettings.KEY_VIDEO_QUALITY,
- CameraSettings.KEY_RECORD_LOCATION
- };
- item = makeItem(R.drawable.ic_settings_holo_light);
- item.setLabel(mActivity.getResources().getString(R.string.camera_menu_settings_label));
- item.setOnClickListener(new OnClickListener() {
- @Override
- public void onClick(PieItem item) {
- if (mPopup == null || mPopupStatus != POPUP_FIRST_LEVEL) {
- initializePopup();
- mPopupStatus = POPUP_FIRST_LEVEL;
- }
- mUI.showPopup(mPopup);
- }
- });
- mRenderer.addItem(item);
- // camera switcher
- if (group.findPreference(CameraSettings.KEY_CAMERA_ID) != null) {
- item = makeItem(R.drawable.ic_switch_back);
- IconListPreference lpref = (IconListPreference) group.findPreference(
- CameraSettings.KEY_CAMERA_ID);
- item.setLabel(lpref.getLabel());
- item.setImageResource(mActivity,
- ((IconListPreference) lpref).getIconIds()
- [lpref.findIndexOfValue(lpref.getValue())]);
-
- final PieItem fitem = item;
- item.setOnClickListener(new OnClickListener() {
-
- @Override
- public void onClick(PieItem item) {
-                // Find the index of the next camera.
- ListPreference pref =
- mPreferenceGroup.findPreference(CameraSettings.KEY_CAMERA_ID);
- if (pref != null) {
- int index = pref.findIndexOfValue(pref.getValue());
- CharSequence[] values = pref.getEntryValues();
- index = (index + 1) % values.length;
- int newCameraId = Integer.parseInt((String) values[index]);
- fitem.setImageResource(mActivity,
- ((IconListPreference) pref).getIconIds()[index]);
- fitem.setLabel(pref.getLabel());
- mListener.onCameraPickerClicked(newCameraId);
- }
- }
- });
- mRenderer.addItem(item);
- }
- // flash
- if (group.findPreference(CameraSettings.KEY_VIDEOCAMERA_FLASH_MODE) != null) {
- item = makeItem(CameraSettings.KEY_VIDEOCAMERA_FLASH_MODE);
- mRenderer.addItem(item);
- }
- }
-
- @Override
- public void reloadPreferences() {
- super.reloadPreferences();
- if (mPopup != null) {
- mPopup.reloadPreference();
- }
- }
-
- @Override
- public void overrideSettings(final String ... keyvalues) {
- super.overrideSettings(keyvalues);
- if (mPopup == null || mPopupStatus != POPUP_FIRST_LEVEL) {
- mPopupStatus = POPUP_FIRST_LEVEL;
- initializePopup();
- }
- ((MoreSettingPopup) mPopup).overrideSettings(keyvalues);
- }
-
- @Override
-    // Called when an item in the second-level popup gets selected.
- public void onListPrefChanged(ListPreference pref) {
- if (mPopup != null) {
- if (mPopupStatus == POPUP_SECOND_LEVEL) {
- mUI.dismissPopup(true);
- }
- }
- super.onSettingChanged(pref);
- }
-
- protected void initializePopup() {
- LayoutInflater inflater = (LayoutInflater) mActivity.getSystemService(
- Context.LAYOUT_INFLATER_SERVICE);
-
- MoreSettingPopup popup = (MoreSettingPopup) inflater.inflate(
- R.layout.more_setting_popup, null, false);
- popup.setSettingChangedListener(this);
- popup.initialize(mPreferenceGroup, mOtherKeys);
- if (mActivity.isSecureCamera()) {
-            // Prevent the location preference from being changed in secure camera mode.
- popup.setPreferenceEnabled(CameraSettings.KEY_RECORD_LOCATION, false);
- }
- mPopup = popup;
- }
-
- public void popupDismissed(boolean topPopupOnly) {
-        // If the second-level popup was dismissed
- if (mPopupStatus == POPUP_SECOND_LEVEL) {
- initializePopup();
- mPopupStatus = POPUP_FIRST_LEVEL;
- if (topPopupOnly) mUI.showPopup(mPopup);
- }
- }
-
- @Override
-    // Called when an item in the first-level popup gets selected; bring up
-    // the second-level popup.
- public void onPreferenceClicked(ListPreference pref) {
- if (mPopupStatus != POPUP_FIRST_LEVEL) return;
-
- LayoutInflater inflater = (LayoutInflater) mActivity.getSystemService(
- Context.LAYOUT_INFLATER_SERVICE);
-
- if (CameraSettings.KEY_VIDEO_TIME_LAPSE_FRAME_INTERVAL.equals(pref.getKey())) {
- TimeIntervalPopup timeInterval = (TimeIntervalPopup) inflater.inflate(
- R.layout.time_interval_popup, null, false);
- timeInterval.initialize((IconListPreference) pref);
- timeInterval.setSettingChangedListener(this);
- mUI.dismissPopup(true);
- mPopup = timeInterval;
- } else {
- ListPrefSettingPopup basic = (ListPrefSettingPopup) inflater.inflate(
- R.layout.list_pref_setting_popup, null, false);
- basic.initialize(pref);
- basic.setSettingChangedListener(this);
- mUI.dismissPopup(true);
- mPopup = basic;
- }
- mUI.showPopup(mPopup);
- mPopupStatus = POPUP_SECOND_LEVEL;
- }
-}
diff --git a/src/com/android/camera/NewVideoModule.java b/src/com/android/camera/NewVideoModule.java
deleted file mode 100644
index 8bec332f9..000000000
--- a/src/com/android/camera/NewVideoModule.java
+++ /dev/null
@@ -1,2253 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package com.android.camera;
-
-import android.annotation.TargetApi;
-import android.app.Activity;
-import android.content.ActivityNotFoundException;
-import android.content.BroadcastReceiver;
-import android.content.ContentResolver;
-import android.content.ContentValues;
-import android.content.Context;
-import android.content.Intent;
-import android.content.IntentFilter;
-import android.content.SharedPreferences.Editor;
-import android.content.res.Configuration;
-import android.graphics.Bitmap;
-import android.graphics.SurfaceTexture;
-import android.hardware.Camera.CameraInfo;
-import android.hardware.Camera.Parameters;
-import android.hardware.Camera.PictureCallback;
-import android.hardware.Camera.Size;
-import android.location.Location;
-import android.media.CamcorderProfile;
-import android.media.CameraProfile;
-import android.media.MediaRecorder;
-import android.net.Uri;
-import android.os.Build;
-import android.os.Bundle;
-import android.os.Handler;
-import android.os.Message;
-import android.os.ParcelFileDescriptor;
-import android.os.SystemClock;
-import android.provider.MediaStore;
-import android.provider.MediaStore.MediaColumns;
-import android.provider.MediaStore.Video;
-import android.util.Log;
-import android.view.KeyEvent;
-import android.view.MotionEvent;
-import android.view.OrientationEventListener;
-import android.view.Surface;
-import android.view.View;
-import android.view.WindowManager;
-import android.widget.Toast;
-
-import com.android.camera.CameraManager.CameraProxy;
-import com.android.camera.ui.PopupManager;
-import com.android.camera.ui.RotateTextToast;
-import com.android.gallery3d.R;
-import com.android.gallery3d.app.OrientationManager;
-import com.android.gallery3d.common.ApiHelper;
-import com.android.gallery3d.exif.ExifInterface;
-import com.android.gallery3d.util.AccessibilityUtils;
-import com.android.gallery3d.util.UsageStatistics;
-
-import java.io.File;
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-
-public class NewVideoModule implements NewCameraModule,
- VideoController,
- CameraPreference.OnPreferenceChangedListener,
- ShutterButton.OnShutterButtonListener,
- MediaRecorder.OnErrorListener,
- MediaRecorder.OnInfoListener,
- EffectsRecorder.EffectsListener {
-
- private static final String TAG = "CAM_VideoModule";
-
- // We number the request code from 1000 to avoid collision with Gallery.
- private static final int REQUEST_EFFECT_BACKDROPPER = 1000;
-
- private static final int CHECK_DISPLAY_ROTATION = 3;
- private static final int CLEAR_SCREEN_DELAY = 4;
- private static final int UPDATE_RECORD_TIME = 5;
- private static final int ENABLE_SHUTTER_BUTTON = 6;
- private static final int SHOW_TAP_TO_SNAPSHOT_TOAST = 7;
- private static final int SWITCH_CAMERA = 8;
- private static final int SWITCH_CAMERA_START_ANIMATION = 9;
- private static final int HIDE_SURFACE_VIEW = 10;
- private static final int CAPTURE_ANIMATION_DONE = 11;
-
- private static final int SCREEN_DELAY = 2 * 60 * 1000;
-
- private static final long SHUTTER_BUTTON_TIMEOUT = 500L; // 500ms
-
- /**
- * An unpublished intent flag requesting to start recording straight away
- * and return as soon as recording is stopped.
- * TODO: consider publishing by moving into MediaStore.
- */
- private static final String EXTRA_QUICK_CAPTURE =
- "android.intent.extra.quickCapture";
-
- private static final int MIN_THUMB_SIZE = 64;
- // module fields
- private NewCameraActivity mActivity;
- private boolean mPaused;
- private int mCameraId;
- private Parameters mParameters;
-
- private Boolean mCameraOpened = false;
- private boolean mIsInReviewMode;
- private boolean mSnapshotInProgress = false;
-
- private static final String EFFECT_BG_FROM_GALLERY = "gallery";
-
- private final CameraErrorCallback mErrorCallback = new CameraErrorCallback();
-
- private ComboPreferences mPreferences;
- private PreferenceGroup mPreferenceGroup;
-
- private boolean mIsVideoCaptureIntent;
- private boolean mQuickCapture;
-
- private MediaRecorder mMediaRecorder;
- private EffectsRecorder mEffectsRecorder;
- private boolean mEffectsDisplayResult;
-
- private int mEffectType = EffectsRecorder.EFFECT_NONE;
- private Object mEffectParameter = null;
- private String mEffectUriFromGallery = null;
- private String mPrefVideoEffectDefault;
- private boolean mResetEffect = true;
-
- private boolean mSwitchingCamera;
- private boolean mMediaRecorderRecording = false;
- private long mRecordingStartTime;
- private boolean mRecordingTimeCountsDown = false;
- private long mOnResumeTime;
- // The video file that the hardware camera is about to record into
- // (or is recording into.)
- private String mVideoFilename;
- private ParcelFileDescriptor mVideoFileDescriptor;
-
- // The video file that has already been recorded, and that is being
- // examined by the user.
- private String mCurrentVideoFilename;
- private Uri mCurrentVideoUri;
- private ContentValues mCurrentVideoValues;
-
- private CamcorderProfile mProfile;
-
- // The video duration limit. 0 means no limit.
- private int mMaxVideoDurationInMs;
-
- // Time Lapse parameters.
- private boolean mCaptureTimeLapse = false;
- // Default 0. If it is larger than 0, the camcorder is in time lapse mode.
- private int mTimeBetweenTimeLapseFrameCaptureMs = 0;
-
- boolean mPreviewing = false; // True if preview is started.
- // The display rotation in degrees. This is only valid when mPreviewing is
- // true.
- private int mDisplayRotation;
- private int mCameraDisplayOrientation;
-
- private int mDesiredPreviewWidth;
- private int mDesiredPreviewHeight;
- private ContentResolver mContentResolver;
-
- private LocationManager mLocationManager;
- private OrientationManager mOrientationManager;
-
- private Surface mSurface;
- private int mPendingSwitchCameraId;
- private boolean mOpenCameraFail;
- private boolean mCameraDisabled;
- private final Handler mHandler = new MainHandler();
- private NewVideoUI mUI;
- private CameraProxy mCameraDevice;
-
- // The degrees of the device rotated clockwise from its natural orientation.
- private int mOrientation = OrientationEventListener.ORIENTATION_UNKNOWN;
-
- private int mZoomValue; // The current zoom value.
-
- private boolean mRestoreFlash; // This is used to check if we need to restore the flash
- // status when going back from gallery.
-
- private final MediaSaveService.OnMediaSavedListener mOnVideoSavedListener =
- new MediaSaveService.OnMediaSavedListener() {
- @Override
- public void onMediaSaved(Uri uri) {
- if (uri != null) {
- mActivity.sendBroadcast(
- new Intent(Util.ACTION_NEW_VIDEO, uri));
- Util.broadcastNewPicture(mActivity, uri);
- }
- }
- };
-
- private final MediaSaveService.OnMediaSavedListener mOnPhotoSavedListener =
- new MediaSaveService.OnMediaSavedListener() {
- @Override
- public void onMediaSaved(Uri uri) {
- if (uri != null) {
- Util.broadcastNewPicture(mActivity, uri);
- }
- }
- };
-
-
- protected class CameraOpenThread extends Thread {
- @Override
- public void run() {
- openCamera();
- }
- }
-
- private void openCamera() {
- try {
- synchronized(mCameraOpened) {
- if (!mCameraOpened) {
- mCameraDevice = Util.openCamera(mActivity, mCameraId);
- mCameraOpened = true;
- }
- }
- mParameters = mCameraDevice.getParameters();
- } catch (CameraHardwareException e) {
- mOpenCameraFail = true;
- } catch (CameraDisabledException e) {
- mCameraDisabled = true;
- }
- }
-
- // This Handler is used to post messages back onto the main thread of the
- // application.
- private class MainHandler extends Handler {
- @Override
- public void handleMessage(Message msg) {
- switch (msg.what) {
-
- case ENABLE_SHUTTER_BUTTON:
- mUI.enableShutter(true);
- break;
-
- case CLEAR_SCREEN_DELAY: {
- mActivity.getWindow().clearFlags(
- WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
- break;
- }
-
- case UPDATE_RECORD_TIME: {
- updateRecordingTime();
- break;
- }
-
- case CHECK_DISPLAY_ROTATION: {
- // Restart the preview if the display rotation has changed.
- // This can happen when the device is held upside down while the
- // camera app is being opened: the rotation animation takes some time,
- // so the rotation value we read may be wrong. The framework does not
- // provide a callback for this yet.
- if ((Util.getDisplayRotation(mActivity) != mDisplayRotation)
- && !mMediaRecorderRecording && !mSwitchingCamera) {
- startPreview();
- }
- if (SystemClock.uptimeMillis() - mOnResumeTime < 5000) {
- mHandler.sendEmptyMessageDelayed(CHECK_DISPLAY_ROTATION, 100);
- }
- break;
- }
-
- case SHOW_TAP_TO_SNAPSHOT_TOAST: {
- showTapToSnapshotToast();
- break;
- }
-
- case SWITCH_CAMERA: {
- switchCamera();
- break;
- }
-
- case SWITCH_CAMERA_START_ANIMATION: {
- //TODO:
- //((CameraScreenNail) mActivity.mCameraScreenNail).animateSwitchCamera();
-
- // Enable all camera controls.
- mSwitchingCamera = false;
- break;
- }
-
- case CAPTURE_ANIMATION_DONE: {
- mUI.enablePreviewThumb(false);
- break;
- }
-
- default:
- Log.v(TAG, "Unhandled message: " + msg.what);
- break;
- }
- }
- }
-
- private BroadcastReceiver mReceiver = null;
-
- private class MyBroadcastReceiver extends BroadcastReceiver {
- @Override
- public void onReceive(Context context, Intent intent) {
- String action = intent.getAction();
- if (action.equals(Intent.ACTION_MEDIA_EJECT)) {
- stopVideoRecording();
- } else if (action.equals(Intent.ACTION_MEDIA_SCANNER_STARTED)) {
- Toast.makeText(mActivity,
- mActivity.getResources().getString(R.string.wait), Toast.LENGTH_LONG).show();
- }
- }
- }
-
- private String createName(long dateTaken) {
- Date date = new Date(dateTaken);
- SimpleDateFormat dateFormat = new SimpleDateFormat(
- mActivity.getString(R.string.video_file_name_format));
-
- return dateFormat.format(date);
- }
-
- private int getPreferredCameraId(ComboPreferences preferences) {
- int intentCameraId = Util.getCameraFacingIntentExtras(mActivity);
- if (intentCameraId != -1) {
- // For testing purposes: launch a specific camera through the intent
- // extras.
- return intentCameraId;
- } else {
- return CameraSettings.readPreferredCameraId(preferences);
- }
- }
-
- private void initializeSurfaceView() {
- if (!ApiHelper.HAS_SURFACE_TEXTURE_RECORDING) { // API level < 16
- mUI.initializeSurfaceView();
- }
- }
-
- @Override
- public void init(NewCameraActivity activity, View root) {
- mActivity = activity;
- mUI = new NewVideoUI(activity, this, root);
- mPreferences = new ComboPreferences(mActivity);
- CameraSettings.upgradeGlobalPreferences(mPreferences.getGlobal());
- mCameraId = getPreferredCameraId(mPreferences);
-
- mPreferences.setLocalId(mActivity, mCameraId);
- CameraSettings.upgradeLocalPreferences(mPreferences.getLocal());
-
- mPrefVideoEffectDefault = mActivity.getString(R.string.pref_video_effect_default);
- resetEffect();
- mOrientationManager = new OrientationManager(mActivity);
-
- /*
- * To reduce startup time, we start the preview in another thread.
- * We make sure the preview is started at the end of onCreate.
- */
- CameraOpenThread cameraOpenThread = new CameraOpenThread();
- cameraOpenThread.start();
-
- mContentResolver = mActivity.getContentResolver();
-
- // Surface texture is from camera screen nail and startPreview needs it.
- // This must be done before startPreview.
- mIsVideoCaptureIntent = isVideoCaptureIntent();
- initializeSurfaceView();
-
- // Make sure camera device is opened.
- try {
- cameraOpenThread.join();
- if (mOpenCameraFail) {
- Util.showErrorAndFinish(mActivity, R.string.cannot_connect_camera);
- return;
- } else if (mCameraDisabled) {
- Util.showErrorAndFinish(mActivity, R.string.camera_disabled);
- return;
- }
- } catch (InterruptedException ex) {
- // ignore
- }
-
- readVideoPreferences();
- mUI.setPrefChangedListener(this);
- new Thread(new Runnable() {
- @Override
- public void run() {
- startPreview();
- }
- }).start();
-
- mQuickCapture = mActivity.getIntent().getBooleanExtra(EXTRA_QUICK_CAPTURE, false);
- mLocationManager = new LocationManager(mActivity, null);
-
- mUI.setOrientationIndicator(0, false);
- setDisplayOrientation();
-
- mUI.showTimeLapseUI(mCaptureTimeLapse);
- initializeVideoSnapshot();
- resizeForPreviewAspectRatio();
-
- initializeVideoControl();
- mPendingSwitchCameraId = -1;
- mUI.updateOnScreenIndicators(mParameters, mPreferences);
-
- // Disable the shutter button if effects are ON since it might take
- // a little more time for the effects preview to be ready. We do not
- // want to allow recording before that happens. The shutter button
- // will be enabled when we get the message from effectsrecorder that
- // the preview is running. This becomes critical when the camera is
- // swapped.
- if (effectsActive()) {
- mUI.enableShutter(false);
- }
- }
-
- // SingleTapListener
- // Preview area is touched. Take a picture.
- @Override
- public void onSingleTapUp(View view, int x, int y) {
- if (mMediaRecorderRecording && effectsActive()) {
- new RotateTextToast(mActivity, R.string.disable_video_snapshot_hint,
- mOrientation).show();
- return;
- }
-
- MediaSaveService s = mActivity.getMediaSaveService();
- if (mPaused || mSnapshotInProgress || effectsActive() || s == null || s.isQueueFull()) {
- return;
- }
-
- if (!mMediaRecorderRecording) {
- // check for dismissing popup
- mUI.dismissPopup(true);
- return;
- }
-
- // Set rotation and gps data.
- int rotation = Util.getJpegRotation(mCameraId, mOrientation);
- mParameters.setRotation(rotation);
- Location loc = mLocationManager.getCurrentLocation();
- Util.setGpsParameters(mParameters, loc);
- mCameraDevice.setParameters(mParameters);
-
- Log.v(TAG, "Video snapshot start");
- mCameraDevice.takePicture(null, null, null, new JpegPictureCallback(loc));
- showVideoSnapshotUI(true);
- mSnapshotInProgress = true;
- UsageStatistics.onEvent(UsageStatistics.COMPONENT_CAMERA,
- UsageStatistics.ACTION_CAPTURE_DONE, "VideoSnapshot");
- }
-
- @Override
- public void onStop() {}
-
- private void loadCameraPreferences() {
- CameraSettings settings = new CameraSettings(mActivity, mParameters,
- mCameraId, CameraHolder.instance().getCameraInfo());
- // Remove the video quality preference setting when the quality is given in the intent.
- mPreferenceGroup = filterPreferenceScreenByIntent(
- settings.getPreferenceGroup(R.xml.video_preferences));
- }
-
- private void initializeVideoControl() {
- loadCameraPreferences();
- mUI.initializePopup(mPreferenceGroup);
- if (effectsActive()) {
- mUI.overrideSettings(
- CameraSettings.KEY_VIDEO_QUALITY,
- Integer.toString(getLowVideoQuality()));
- }
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB)
- private static int getLowVideoQuality() {
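- // QUALITY_480P and the other fine-grained quality levels were added in
- // API 11 (Honeycomb); older releases only expose QUALITY_LOW / QUALITY_HIGH.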
- if (ApiHelper.HAS_FINE_RESOLUTION_QUALITY_LEVELS) {
- return CamcorderProfile.QUALITY_480P;
- } else {
- return CamcorderProfile.QUALITY_LOW;
- }
- }
-
-
- @Override
- public void onOrientationChanged(int orientation) {
- // We keep the last known orientation, so if the user first orients
- // the camera and then points it at the floor or the sky, we still
- // have the correct orientation.
- if (orientation == OrientationEventListener.ORIENTATION_UNKNOWN) return;
- int newOrientation = Util.roundOrientation(orientation, mOrientation);
-
- if (mOrientation != newOrientation) {
- mOrientation = newOrientation;
- // The input of effects recorder is affected by
- // android.hardware.Camera.setDisplayOrientation. Its value only
- // compensates the camera orientation (no Display.getRotation).
- // So the orientation hint here should only consider sensor
- // orientation.
- if (effectsActive()) {
- mEffectsRecorder.setOrientationHint(mOrientation);
- }
- }
-
- // Show the toast after getting the first orientation changed.
- if (mHandler.hasMessages(SHOW_TAP_TO_SNAPSHOT_TOAST)) {
- mHandler.removeMessages(SHOW_TAP_TO_SNAPSHOT_TOAST);
- showTapToSnapshotToast();
- }
- }
-
- private void startPlayVideoActivity() {
- Intent intent = new Intent(Intent.ACTION_VIEW);
- intent.setDataAndType(mCurrentVideoUri, convertOutputFormatToMimeType(mProfile.fileFormat));
- try {
- mActivity.startActivity(intent);
- } catch (ActivityNotFoundException ex) {
- Log.e(TAG, "Couldn't view video " + mCurrentVideoUri, ex);
- }
- }
-
- @OnClickAttr
- public void onReviewPlayClicked(View v) {
- startPlayVideoActivity();
- }
-
- @OnClickAttr
- public void onReviewDoneClicked(View v) {
- mIsInReviewMode = false;
- doReturnToCaller(true);
- }
-
- @OnClickAttr
- public void onReviewCancelClicked(View v) {
- mIsInReviewMode = false;
- stopVideoRecording();
- doReturnToCaller(false);
- }
-
- @Override
- public boolean isInReviewMode() {
- return mIsInReviewMode;
- }
-
- private void onStopVideoRecording() {
- mEffectsDisplayResult = true;
- boolean recordFail = stopVideoRecording();
- if (mIsVideoCaptureIntent) {
- if (!effectsActive()) {
- if (mQuickCapture) {
- doReturnToCaller(!recordFail);
- } else if (!recordFail) {
- showCaptureResult();
- }
- }
- } else if (!recordFail){
- // Start capture animation.
- if (!mPaused && ApiHelper.HAS_SURFACE_TEXTURE_RECORDING) {
- // The capture animation is disabled on ICS because we use SurfaceView
- // for preview during recording. When the recording is done, we switch
- // back to using SurfaceTexture for preview, which requires stopping and
- // then restarting the preview. This causes a brief preview flicker since
- // the preview is not continuous for a short period of time.
- // TODO: need to get the capture animation to work
- // ((CameraScreenNail) mActivity.mCameraScreenNail).animateCapture(mDisplayRotation);
-
- mUI.enablePreviewThumb(true);
-
- // Make sure to disable the thumbnail preview after the
- // animation is done to disable the click target.
- mHandler.removeMessages(CAPTURE_ANIMATION_DONE);
- mHandler.sendEmptyMessageDelayed(CAPTURE_ANIMATION_DONE,
- CaptureAnimManager.getAnimationDuration());
- }
- }
- }
-
- public void onProtectiveCurtainClick(View v) {
- // Consume clicks
- }
-
- @Override
- public void onShutterButtonClick() {
- if (mUI.collapseCameraControls() || mSwitchingCamera) return;
-
- boolean stop = mMediaRecorderRecording;
-
- if (stop) {
- onStopVideoRecording();
- } else {
- startVideoRecording();
- }
- mUI.enableShutter(false);
-
- // Keep the shutter button disabled when in video capture intent
- // mode and recording is stopped. It'll be re-enabled when
- // re-take button is clicked.
- if (!(mIsVideoCaptureIntent && stop)) {
- mHandler.sendEmptyMessageDelayed(
- ENABLE_SHUTTER_BUTTON, SHUTTER_BUTTON_TIMEOUT);
- }
- }
-
- @Override
- public void onShutterButtonFocus(boolean pressed) {
- mUI.setShutterPressed(pressed);
- }
-
- private void readVideoPreferences() {
- // The preference stores values from ListPreference and is thus string type for all values.
- // We need to convert it to int manually.
- String defaultQuality = CameraSettings.getDefaultVideoQuality(mCameraId,
- mActivity.getResources().getString(R.string.pref_video_quality_default));
- String videoQuality =
- mPreferences.getString(CameraSettings.KEY_VIDEO_QUALITY,
- defaultQuality);
- int quality = Integer.valueOf(videoQuality);
-
- // Set video quality.
- Intent intent = mActivity.getIntent();
- if (intent.hasExtra(MediaStore.EXTRA_VIDEO_QUALITY)) {
- int extraVideoQuality =
- intent.getIntExtra(MediaStore.EXTRA_VIDEO_QUALITY, 0);
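- // MediaStore.EXTRA_VIDEO_QUALITY: 0 means low (MMS) quality, 1 means high quality.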
- if (extraVideoQuality > 0) {
- quality = CamcorderProfile.QUALITY_HIGH;
- } else { // 0 is mms.
- quality = CamcorderProfile.QUALITY_LOW;
- }
- }
-
- // Set video duration limit. The limit is read from the preference,
- // unless it is specified in the intent.
- if (intent.hasExtra(MediaStore.EXTRA_DURATION_LIMIT)) {
- int seconds =
- intent.getIntExtra(MediaStore.EXTRA_DURATION_LIMIT, 0);
- mMaxVideoDurationInMs = 1000 * seconds;
- } else {
- mMaxVideoDurationInMs = CameraSettings.getMaxVideoDuration(mActivity);
- }
-
- // Set effect
- mEffectType = CameraSettings.readEffectType(mPreferences);
- if (mEffectType != EffectsRecorder.EFFECT_NONE) {
- mEffectParameter = CameraSettings.readEffectParameter(mPreferences);
- // Set quality to be no higher than 480p.
- CamcorderProfile profile = CamcorderProfile.get(mCameraId, quality);
- if (profile.videoFrameHeight > 480) {
- quality = getLowVideoQuality();
- }
- } else {
- mEffectParameter = null;
- }
- // Read time lapse recording interval.
- if (ApiHelper.HAS_TIME_LAPSE_RECORDING) {
- String frameIntervalStr = mPreferences.getString(
- CameraSettings.KEY_VIDEO_TIME_LAPSE_FRAME_INTERVAL,
- mActivity.getString(R.string.pref_video_time_lapse_frame_interval_default));
- mTimeBetweenTimeLapseFrameCaptureMs = Integer.parseInt(frameIntervalStr);
- mCaptureTimeLapse = (mTimeBetweenTimeLapseFrameCaptureMs != 0);
- }
- // TODO: Use the matching QUALITY_TIME_LAPSE_* constant directly instead of adding 1000.
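- // CamcorderProfile defines each QUALITY_TIME_LAPSE_* constant as the
- // corresponding QUALITY_* value plus 1000, which is why the offset below
- // selects the time lapse variant of the chosen profile.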
- if (mCaptureTimeLapse) quality += 1000;
- mProfile = CamcorderProfile.get(mCameraId, quality);
- getDesiredPreviewSize();
- }
-
- private void writeDefaultEffectToPrefs() {
- ComboPreferences.Editor editor = mPreferences.edit();
- editor.putString(CameraSettings.KEY_VIDEO_EFFECT,
- mActivity.getString(R.string.pref_video_effect_default));
- editor.apply();
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB)
- private void getDesiredPreviewSize() {
- mParameters = mCameraDevice.getParameters();
- if (ApiHelper.HAS_GET_SUPPORTED_VIDEO_SIZE) {
- if (mParameters.getSupportedVideoSizes() == null || effectsActive()) {
- mDesiredPreviewWidth = mProfile.videoFrameWidth;
- mDesiredPreviewHeight = mProfile.videoFrameHeight;
- } else { // Driver supports separate outputs for preview and video.
- List<Size> sizes = mParameters.getSupportedPreviewSizes();
- Size preferred = mParameters.getPreferredPreviewSizeForVideo();
- int product = preferred.width * preferred.height;
- Iterator<Size> it = sizes.iterator();
- // Remove the preview sizes whose pixel count exceeds that of the preferred size.
- while (it.hasNext()) {
- Size size = it.next();
- if (size.width * size.height > product) {
- it.remove();
- }
- }
- Size optimalSize = Util.getOptimalPreviewSize(mActivity, sizes,
- (double) mProfile.videoFrameWidth / mProfile.videoFrameHeight);
- mDesiredPreviewWidth = optimalSize.width;
- mDesiredPreviewHeight = optimalSize.height;
- }
- } else {
- mDesiredPreviewWidth = mProfile.videoFrameWidth;
- mDesiredPreviewHeight = mProfile.videoFrameHeight;
- }
- mUI.setPreviewSize(mDesiredPreviewWidth, mDesiredPreviewHeight);
- Log.v(TAG, "mDesiredPreviewWidth=" + mDesiredPreviewWidth +
- ". mDesiredPreviewHeight=" + mDesiredPreviewHeight);
- }
-
- private void resizeForPreviewAspectRatio() {
- mUI.setAspectRatio(
- (double) mProfile.videoFrameWidth / mProfile.videoFrameHeight);
- }
-
- @Override
- public void installIntentFilter() {
- // install an intent filter to receive SD card related events.
- IntentFilter intentFilter =
- new IntentFilter(Intent.ACTION_MEDIA_EJECT);
- intentFilter.addAction(Intent.ACTION_MEDIA_SCANNER_STARTED);
- intentFilter.addDataScheme("file");
- mReceiver = new MyBroadcastReceiver();
- mActivity.registerReceiver(mReceiver, intentFilter);
- }
-
- @Override
- public void onResumeBeforeSuper() {
- mPaused = false;
- }
-
- @Override
- public void onResumeAfterSuper() {
- if (mOpenCameraFail || mCameraDisabled)
- return;
- mUI.enableShutter(false);
- mZoomValue = 0;
-
- showVideoSnapshotUI(false);
-
- if (!mPreviewing) {
- resetEffect();
- openCamera();
- if (mOpenCameraFail) {
- Util.showErrorAndFinish(mActivity,
- R.string.cannot_connect_camera);
- return;
- } else if (mCameraDisabled) {
- Util.showErrorAndFinish(mActivity, R.string.camera_disabled);
- return;
- }
- readVideoPreferences();
- resizeForPreviewAspectRatio();
- new Thread(new Runnable() {
- @Override
- public void run() {
- startPreview();
- }
- }).start();
- } else {
- // preview already started
- mUI.enableShutter(true);
- }
-
- // Initialize zoom here, after the preview has started.
- mUI.initializeZoom(mParameters);
-
- keepScreenOnAwhile();
-
- // Initialize location service.
- boolean recordLocation = RecordLocationPreference.get(mPreferences,
- mContentResolver);
- mLocationManager.recordLocation(recordLocation);
-
- if (mPreviewing) {
- mOnResumeTime = SystemClock.uptimeMillis();
- mHandler.sendEmptyMessageDelayed(CHECK_DISPLAY_ROTATION, 100);
- }
- // Dismiss open menu if exists.
- PopupManager.getInstance(mActivity).notifyShowPopup(null);
-
- UsageStatistics.onContentViewChanged(
- UsageStatistics.COMPONENT_CAMERA, "VideoModule");
- }
-
- private void setDisplayOrientation() {
- mDisplayRotation = Util.getDisplayRotation(mActivity);
- mCameraDisplayOrientation = Util.getDisplayOrientation(mDisplayRotation, mCameraId);
- // Change the camera display orientation
- if (mCameraDevice != null) {
- mCameraDevice.setDisplayOrientation(mCameraDisplayOrientation);
- }
- }
-
- @Override
- public int onZoomChanged(int index) {
- // Not useful to change zoom value when the activity is paused.
- if (mPaused) return index;
- mZoomValue = index;
- if (mParameters == null || mCameraDevice == null) return index;
- // Set zoom parameters asynchronously
- mParameters.setZoom(mZoomValue);
- mCameraDevice.setParameters(mParameters);
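- // Read the parameters back in case the driver adjusted the requested zoom index.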
- Parameters p = mCameraDevice.getParameters();
- if (p != null) return p.getZoom();
- return index;
- }
- private void startPreview() {
- Log.v(TAG, "startPreview");
-
- mCameraDevice.setErrorCallback(mErrorCallback);
- if (mPreviewing) {
- stopPreview();
- if (effectsActive() && mEffectsRecorder != null) {
- mEffectsRecorder.release();
- mEffectsRecorder = null;
- }
- }
-
- setDisplayOrientation();
- mCameraDevice.setDisplayOrientation(mCameraDisplayOrientation);
- setCameraParameters();
-
- try {
- if (!effectsActive()) {
- SurfaceTexture surfaceTexture = mUI.getSurfaceTexture();
- if (surfaceTexture == null) {
- return; // The texture has been destroyed (pause, etc)
- }
- mCameraDevice.setPreviewTextureAsync(surfaceTexture);
- mCameraDevice.startPreviewAsync();
- mPreviewing = true;
- onPreviewStarted();
- } else {
- initializeEffectsPreview();
- mEffectsRecorder.startPreview();
- mPreviewing = true;
- onPreviewStarted();
- }
- } catch (Throwable ex) {
- closeCamera();
- throw new RuntimeException("startPreview failed", ex);
- } finally {
- mActivity.runOnUiThread(new Runnable() {
- @Override
- public void run() {
- if (mOpenCameraFail) {
- Util.showErrorAndFinish(mActivity, R.string.cannot_connect_camera);
- } else if (mCameraDisabled) {
- Util.showErrorAndFinish(mActivity, R.string.camera_disabled);
- }
- }
- });
- }
-
- }
-
- private void onPreviewStarted() {
- mUI.enableShutter(true);
- }
-
- @Override
- public void stopPreview() {
- if (!mPreviewing) return;
- mCameraDevice.stopPreview();
- mPreviewing = false;
- }
-
- // Closing the effects out. Will shut down the effects graph.
- private void closeEffects() {
- Log.v(TAG, "Closing effects");
- mEffectType = EffectsRecorder.EFFECT_NONE;
- if (mEffectsRecorder == null) {
- Log.d(TAG, "Effects are already closed. Nothing to do");
- return;
- }
- // This call can handle the case where the camera is already released
- // after the recording has been stopped.
- mEffectsRecorder.release();
- mEffectsRecorder = null;
- }
-
- // By default, we want to close the effects as well with the camera.
- private void closeCamera() {
- closeCamera(true);
- }
-
- // In certain cases, when the effects are active, we may want to shut down
- // only the camera-related parts and handle closing the effects in the
- // effectsUpdate callback.
- // For example, in onPause we want to make the camera available to the
- // outside world immediately, but we want to wait for the effects callback
- // before shutting down the effects. In such a case, we just disconnect
- // the effects from the camera by calling disconnectCamera, so that the
- // effects can handle the shutdown themselves.
- //
- // @param closeEffectsAlso - indicates whether we also want to close the
- // effects along with the camera.
- private void closeCamera(boolean closeEffectsAlso) {
- Log.v(TAG, "closeCamera");
- if (mCameraDevice == null) {
- Log.d(TAG, "already stopped.");
- return;
- }
-
- if (mEffectsRecorder != null) {
- // Disconnect the camera from effects so that camera is ready to
- // be released to the outside world.
- mEffectsRecorder.disconnectCamera();
- }
- if (closeEffectsAlso) closeEffects();
- mCameraDevice.setZoomChangeListener(null);
- mCameraDevice.setErrorCallback(null);
- synchronized(mCameraOpened) {
- if (mCameraOpened) {
- CameraHolder.instance().release();
- }
- mCameraOpened = false;
- }
- mCameraDevice = null;
- mPreviewing = false;
- mSnapshotInProgress = false;
- }
-
- private void releasePreviewResources() {
- if (!ApiHelper.HAS_SURFACE_TEXTURE_RECORDING) {
- mUI.hideSurfaceView();
- }
- }
-
- @Override
- public void onPauseBeforeSuper() {
- mPaused = true;
-
- if (mMediaRecorderRecording) {
- // Camera will be released in onStopVideoRecording.
- onStopVideoRecording();
- } else {
- closeCamera();
- if (!effectsActive()) releaseMediaRecorder();
- }
- if (effectsActive()) {
- // If the effects are active, make sure we tell the graph that the
- // surfacetexture is not valid anymore. Disconnect the graph from
- // the display. This should be done before releasing the surface
- // texture.
- mEffectsRecorder.disconnectDisplay();
- } else {
- // Close the file descriptor and clear the video namer only if the
- // effects are not active. If effects are active, we need to wait
- // until we get the callback from the effects that the graph is done
- // recording. That also needs a change in the stopVideoRecording()
- // call to not call closeCamera if the effects are active, because
- // that would close down the effects as well, thus making this if
- // condition invalid.
- closeVideoFileDescriptor();
- }
-
- releasePreviewResources();
-
- if (mReceiver != null) {
- mActivity.unregisterReceiver(mReceiver);
- mReceiver = null;
- }
- resetScreenOn();
-
- if (mLocationManager != null) mLocationManager.recordLocation(false);
-
- mHandler.removeMessages(CHECK_DISPLAY_ROTATION);
- mHandler.removeMessages(SWITCH_CAMERA);
- mHandler.removeMessages(SWITCH_CAMERA_START_ANIMATION);
- mPendingSwitchCameraId = -1;
- mSwitchingCamera = false;
- // Call onPause after stopping video recording, so the camera can be
- // released as soon as possible.
- }
-
- @Override
- public void onPauseAfterSuper() {
- }
-
- @Override
- public void onUserInteraction() {
- if (!mMediaRecorderRecording && !mActivity.isFinishing()) {
- keepScreenOnAwhile();
- }
- }
-
- @Override
- public boolean onBackPressed() {
- if (mPaused) return true;
- if (mMediaRecorderRecording) {
- onStopVideoRecording();
- return true;
- } else if (mUI.hidePieRenderer()) {
- return true;
- } else {
- return mUI.removeTopLevelPopup();
- }
- }
-
- @Override
- public boolean onKeyDown(int keyCode, KeyEvent event) {
- // Do not handle any key if the activity is paused.
- if (mPaused) {
- return true;
- }
-
- switch (keyCode) {
- case KeyEvent.KEYCODE_CAMERA:
- if (event.getRepeatCount() == 0) {
- mUI.clickShutter();
- return true;
- }
- break;
- case KeyEvent.KEYCODE_DPAD_CENTER:
- if (event.getRepeatCount() == 0) {
- mUI.clickShutter();
- return true;
- }
- break;
- case KeyEvent.KEYCODE_MENU:
- if (mMediaRecorderRecording) return true;
- break;
- }
- return false;
- }
-
- @Override
- public boolean onKeyUp(int keyCode, KeyEvent event) {
- switch (keyCode) {
- case KeyEvent.KEYCODE_CAMERA:
- mUI.pressShutter(false);
- return true;
- }
- return false;
- }
-
- @Override
- public boolean isVideoCaptureIntent() {
- String action = mActivity.getIntent().getAction();
- return (MediaStore.ACTION_VIDEO_CAPTURE.equals(action));
- }
-
- private void doReturnToCaller(boolean valid) {
- Intent resultIntent = new Intent();
- int resultCode;
- if (valid) {
- resultCode = Activity.RESULT_OK;
- resultIntent.setData(mCurrentVideoUri);
- } else {
- resultCode = Activity.RESULT_CANCELED;
- }
- mActivity.setResultEx(resultCode, resultIntent);
- mActivity.finish();
- }
-
- private void cleanupEmptyFile() {
- if (mVideoFilename != null) {
- File f = new File(mVideoFilename);
- if (f.length() == 0 && f.delete()) {
- Log.v(TAG, "Empty video file deleted: " + mVideoFilename);
- mVideoFilename = null;
- }
- }
- }
-
- private void setupMediaRecorderPreviewDisplay() {
- // Nothing to do here if using SurfaceTexture.
- if (!ApiHelper.HAS_SURFACE_TEXTURE_RECORDING) {
- // We stop the preview here before unlocking the device because we
- // need to change the SurfaceTexture to SurfaceView for preview.
- stopPreview();
- mCameraDevice.setPreviewDisplayAsync(mUI.getSurfaceHolder());
- // The orientation for SurfaceTexture is different from that for
- // SurfaceView. For SurfaceTexture we don't need to consider the
- // display rotation. Just consider the sensor's orientation and we
- // will set the orientation correctly when showing the texture.
- // Gallery will handle the orientation for the preview. For
- // SurfaceView we will have to take everything into account so the
- // display rotation is considered.
- mCameraDevice.setDisplayOrientation(
- Util.getDisplayOrientation(mDisplayRotation, mCameraId));
- mCameraDevice.startPreviewAsync();
- mPreviewing = true;
- mMediaRecorder.setPreviewDisplay(mUI.getSurfaceHolder().getSurface());
- }
- }
-
- // Prepares media recorder.
- private void initializeRecorder() {
- Log.v(TAG, "initializeRecorder");
- // If the mCameraDevice is null, then this activity is going to finish
- if (mCameraDevice == null) return;
-
- if (!ApiHelper.HAS_SURFACE_TEXTURE_RECORDING) {
- // Set the SurfaceView to visible so the surface gets created.
- // surfaceCreated() is called immediately when the visibility is
- // changed to visible. Thus, mSurfaceViewReady should become true
- // right after calling setVisibility().
- mUI.showSurfaceView();
- }
-
- Intent intent = mActivity.getIntent();
- Bundle myExtras = intent.getExtras();
-
- long requestedSizeLimit = 0;
- closeVideoFileDescriptor();
- if (mIsVideoCaptureIntent && myExtras != null) {
- Uri saveUri = (Uri) myExtras.getParcelable(MediaStore.EXTRA_OUTPUT);
- if (saveUri != null) {
- try {
- mVideoFileDescriptor =
- mContentResolver.openFileDescriptor(saveUri, "rw");
- mCurrentVideoUri = saveUri;
- } catch (java.io.FileNotFoundException ex) {
- // invalid uri
- Log.e(TAG, ex.toString());
- }
- }
- requestedSizeLimit = myExtras.getLong(MediaStore.EXTRA_SIZE_LIMIT);
- }
- mMediaRecorder = new MediaRecorder();
-
- setupMediaRecorderPreviewDisplay();
- // Unlock the camera object before passing it to media recorder.
- mCameraDevice.unlock();
- mCameraDevice.waitDone();
- mMediaRecorder.setCamera(mCameraDevice.getCamera());
- if (!mCaptureTimeLapse) {
- mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
- }
- mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
- mMediaRecorder.setProfile(mProfile);
- mMediaRecorder.setMaxDuration(mMaxVideoDurationInMs);
- if (mCaptureTimeLapse) {
- double fps = 1000 / (double) mTimeBetweenTimeLapseFrameCaptureMs;
- setCaptureRate(mMediaRecorder, fps);
- }
-
- setRecordLocation();
-
- // Set output file.
- // Try Uri in the intent first. If it doesn't exist, use our own
- // instead.
- if (mVideoFileDescriptor != null) {
- mMediaRecorder.setOutputFile(mVideoFileDescriptor.getFileDescriptor());
- } else {
- generateVideoFilename(mProfile.fileFormat);
- mMediaRecorder.setOutputFile(mVideoFilename);
- }
-
- // Set maximum file size.
- long maxFileSize = mActivity.getStorageSpace() - Storage.LOW_STORAGE_THRESHOLD;
- if (requestedSizeLimit > 0 && requestedSizeLimit < maxFileSize) {
- maxFileSize = requestedSizeLimit;
- }
-
- try {
- mMediaRecorder.setMaxFileSize(maxFileSize);
- } catch (RuntimeException exception) {
- // We are going to ignore failure of setMaxFileSize here, as
- // a) The composer selected may simply not support it, or
- // b) The underlying media framework may not handle 64-bit range
- // on the size restriction.
- }
-
- // See android.hardware.Camera.Parameters.setRotation for
- // documentation.
- // Note that mOrientation here is the device orientation, which is the opposite of
- // what activity.getWindowManager().getDefaultDisplay().getRotation() would return,
- // which is the orientation the graphics need to rotate in order to render correctly.
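- // Example: a back-facing camera mounted at 90 degrees with the device held
- // at mOrientation == 270 gets an orientation hint of (90 + 270) % 360 == 0.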
- int rotation = 0;
- if (mOrientation != OrientationEventListener.ORIENTATION_UNKNOWN) {
- CameraInfo info = CameraHolder.instance().getCameraInfo()[mCameraId];
- if (info.facing == CameraInfo.CAMERA_FACING_FRONT) {
- rotation = (info.orientation - mOrientation + 360) % 360;
- } else { // back-facing camera
- rotation = (info.orientation + mOrientation) % 360;
- }
- }
- mMediaRecorder.setOrientationHint(rotation);
-
- try {
- mMediaRecorder.prepare();
- } catch (IOException e) {
- Log.e(TAG, "prepare failed for " + mVideoFilename, e);
- releaseMediaRecorder();
- throw new RuntimeException(e);
- }
-
- mMediaRecorder.setOnErrorListener(this);
- mMediaRecorder.setOnInfoListener(this);
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.HONEYCOMB)
- private static void setCaptureRate(MediaRecorder recorder, double fps) {
- recorder.setCaptureRate(fps);
- }
-
- @TargetApi(ApiHelper.VERSION_CODES.ICE_CREAM_SANDWICH)
- private void setRecordLocation() {
- if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH) {
- Location loc = mLocationManager.getCurrentLocation();
- if (loc != null) {
- mMediaRecorder.setLocation((float) loc.getLatitude(),
- (float) loc.getLongitude());
- }
- }
- }
-
- private void initializeEffectsPreview() {
- Log.v(TAG, "initializeEffectsPreview");
- // If the mCameraDevice is null, then this activity is going to finish
- if (mCameraDevice == null) return;
-
- boolean inLandscape = (mActivity.getResources().getConfiguration().orientation
- == Configuration.ORIENTATION_LANDSCAPE);
-
- CameraInfo info = CameraHolder.instance().getCameraInfo()[mCameraId];
-
- mEffectsDisplayResult = false;
- mEffectsRecorder = new EffectsRecorder(mActivity);
-
- // TODO: Confirm none of the following need to go to initializeEffectsRecording()
- // and none of these change even when the preview is not refreshed.
- mEffectsRecorder.setCameraDisplayOrientation(mCameraDisplayOrientation);
- mEffectsRecorder.setCamera(mCameraDevice);
- mEffectsRecorder.setCameraFacing(info.facing);
- mEffectsRecorder.setProfile(mProfile);
- mEffectsRecorder.setEffectsListener(this);
- mEffectsRecorder.setOnInfoListener(this);
- mEffectsRecorder.setOnErrorListener(this);
-
- // The input of effects recorder is affected by
- // android.hardware.Camera.setDisplayOrientation. Its value only
- // compensates the camera orientation (no Display.getRotation). So the
- // orientation hint here should only consider sensor orientation.
- int orientation = 0;
- if (mOrientation != OrientationEventListener.ORIENTATION_UNKNOWN) {
- orientation = mOrientation;
- }
- mEffectsRecorder.setOrientationHint(orientation);
-
- mEffectsRecorder.setPreviewSurfaceTexture(mUI.getSurfaceTexture(),
- mUI.getPreviewWidth(), mUI.getPreviewHeight());
-
- if (mEffectType == EffectsRecorder.EFFECT_BACKDROPPER &&
- ((String) mEffectParameter).equals(EFFECT_BG_FROM_GALLERY)) {
- mEffectsRecorder.setEffect(mEffectType, mEffectUriFromGallery);
- } else {
- mEffectsRecorder.setEffect(mEffectType, mEffectParameter);
- }
- }
-
- private void initializeEffectsRecording() {
- Log.v(TAG, "initializeEffectsRecording");
-
- Intent intent = mActivity.getIntent();
- Bundle myExtras = intent.getExtras();
-
- long requestedSizeLimit = 0;
- closeVideoFileDescriptor();
- if (mIsVideoCaptureIntent && myExtras != null) {
- Uri saveUri = (Uri) myExtras.getParcelable(MediaStore.EXTRA_OUTPUT);
- if (saveUri != null) {
- try {
- mVideoFileDescriptor =
- mContentResolver.openFileDescriptor(saveUri, "rw");
- mCurrentVideoUri = saveUri;
- } catch (java.io.FileNotFoundException ex) {
- // invalid uri
- Log.e(TAG, ex.toString());
- }
- }
- requestedSizeLimit = myExtras.getLong(MediaStore.EXTRA_SIZE_LIMIT);
- }
-
- mEffectsRecorder.setProfile(mProfile);
- // It is important to set the capture rate to zero when not in time lapse
- // mode, since the EffectsRecorder object is not recreated for each
- // recording session.
- if (mCaptureTimeLapse) {
- mEffectsRecorder.setCaptureRate((1000 / (double) mTimeBetweenTimeLapseFrameCaptureMs));
- } else {
- mEffectsRecorder.setCaptureRate(0);
- }
-
- // Set output file
- if (mVideoFileDescriptor != null) {
- mEffectsRecorder.setOutputFile(mVideoFileDescriptor.getFileDescriptor());
- } else {
- generateVideoFilename(mProfile.fileFormat);
- mEffectsRecorder.setOutputFile(mVideoFilename);
- }
-
- // Set maximum file size.
- long maxFileSize = mActivity.getStorageSpace() - Storage.LOW_STORAGE_THRESHOLD;
- if (requestedSizeLimit > 0 && requestedSizeLimit < maxFileSize) {
- maxFileSize = requestedSizeLimit;
- }
- mEffectsRecorder.setMaxFileSize(maxFileSize);
- mEffectsRecorder.setMaxDuration(mMaxVideoDurationInMs);
- }
-
-
- private void releaseMediaRecorder() {
- Log.v(TAG, "Releasing media recorder.");
- if (mMediaRecorder != null) {
- cleanupEmptyFile();
- mMediaRecorder.reset();
- mMediaRecorder.release();
- mMediaRecorder = null;
- }
- mVideoFilename = null;
- }
-
- private void releaseEffectsRecorder() {
- Log.v(TAG, "Releasing effects recorder.");
- if (mEffectsRecorder != null) {
- cleanupEmptyFile();
- mEffectsRecorder.release();
- mEffectsRecorder = null;
- }
- mEffectType = EffectsRecorder.EFFECT_NONE;
- mVideoFilename = null;
- }
-
- private void generateVideoFilename(int outputFileFormat) {
- long dateTaken = System.currentTimeMillis();
- String title = createName(dateTaken);
- // Used when emailing.
- String filename = title + convertOutputFormatToFileExt(outputFileFormat);
- String mime = convertOutputFormatToMimeType(outputFileFormat);
- String path = Storage.DIRECTORY + '/' + filename;
- String tmpPath = path + ".tmp";
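- // Record into a ".tmp" file; the MediaStore values below use the final path
- // (without the suffix), and the save service is expected to rename the file
- // when the video is saved.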
- mCurrentVideoValues = new ContentValues(9);
- mCurrentVideoValues.put(Video.Media.TITLE, title);
- mCurrentVideoValues.put(Video.Media.DISPLAY_NAME, filename);
- mCurrentVideoValues.put(Video.Media.DATE_TAKEN, dateTaken);
- mCurrentVideoValues.put(MediaColumns.DATE_MODIFIED, dateTaken / 1000);
- mCurrentVideoValues.put(Video.Media.MIME_TYPE, mime);
- mCurrentVideoValues.put(Video.Media.DATA, path);
- mCurrentVideoValues.put(Video.Media.RESOLUTION,
- Integer.toString(mProfile.videoFrameWidth) + "x" +
- Integer.toString(mProfile.videoFrameHeight));
- Location loc = mLocationManager.getCurrentLocation();
- if (loc != null) {
- mCurrentVideoValues.put(Video.Media.LATITUDE, loc.getLatitude());
- mCurrentVideoValues.put(Video.Media.LONGITUDE, loc.getLongitude());
- }
- mVideoFilename = tmpPath;
- Log.v(TAG, "New video filename: " + mVideoFilename);
- }
-
- private void saveVideo() {
- if (mVideoFileDescriptor == null) {
- long duration = SystemClock.uptimeMillis() - mRecordingStartTime;
- if (duration > 0) {
- if (mCaptureTimeLapse) {
- duration = getTimeLapseVideoLength(duration);
- }
- } else {
- Log.w(TAG, "Video duration <= 0 : " + duration);
- }
- mActivity.getMediaSaveService().addVideo(mCurrentVideoFilename,
- duration, mCurrentVideoValues,
- mOnVideoSavedListener, mContentResolver);
- }
- mCurrentVideoValues = null;
- }
-
- private void deleteVideoFile(String fileName) {
- Log.v(TAG, "Deleting video " + fileName);
- File f = new File(fileName);
- if (!f.delete()) {
- Log.v(TAG, "Could not delete " + fileName);
- }
- }
-
- private PreferenceGroup filterPreferenceScreenByIntent(
- PreferenceGroup screen) {
- Intent intent = mActivity.getIntent();
- if (intent.hasExtra(MediaStore.EXTRA_VIDEO_QUALITY)) {
- CameraSettings.removePreferenceFromScreen(screen,
- CameraSettings.KEY_VIDEO_QUALITY);
- }
-
- if (intent.hasExtra(MediaStore.EXTRA_DURATION_LIMIT)) {
- CameraSettings.removePreferenceFromScreen(screen,
- CameraSettings.KEY_VIDEO_QUALITY);
- }
- return screen;
- }
-
- // from MediaRecorder.OnErrorListener
- @Override
- public void onError(MediaRecorder mr, int what, int extra) {
- Log.e(TAG, "MediaRecorder error. what=" + what + ". extra=" + extra);
- if (what == MediaRecorder.MEDIA_RECORDER_ERROR_UNKNOWN) {
- // We may have run out of space on the sdcard.
- stopVideoRecording();
- mActivity.updateStorageSpaceAndHint();
- }
- }
-
- // from MediaRecorder.OnInfoListener
- @Override
- public void onInfo(MediaRecorder mr, int what, int extra) {
- if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_DURATION_REACHED) {
- if (mMediaRecorderRecording) onStopVideoRecording();
- } else if (what == MediaRecorder.MEDIA_RECORDER_INFO_MAX_FILESIZE_REACHED) {
- if (mMediaRecorderRecording) onStopVideoRecording();
-
- // Show the toast.
- Toast.makeText(mActivity, R.string.video_reach_size_limit,
- Toast.LENGTH_LONG).show();
- }
- }
-
- /*
- * To make sure we're not recording music playing in the background, ask
- * the MediaPlaybackService to pause playback.
- */
- private void pauseAudioPlayback() {
- // Shamelessly copied from MediaPlaybackService.java, which
- // should be public, but isn't.
- Intent i = new Intent("com.android.music.musicservicecommand");
- i.putExtra("command", "pause");
-
- mActivity.sendBroadcast(i);
- }
-
- // For testing.
- public boolean isRecording() {
- return mMediaRecorderRecording;
- }
-
- private void startVideoRecording() {
- Log.v(TAG, "startVideoRecording");
- mUI.enablePreviewThumb(false);
- mUI.setSwipingEnabled(false);
-
- mActivity.updateStorageSpaceAndHint();
- if (mActivity.getStorageSpace() <= Storage.LOW_STORAGE_THRESHOLD) {
- Log.v(TAG, "Storage issue, ignore the start request");
- return;
- }
-
- if (!mCameraDevice.waitDone()) return;
- mCurrentVideoUri = null;
- if (effectsActive()) {
- initializeEffectsRecording();
- if (mEffectsRecorder == null) {
- Log.e(TAG, "Fail to initialize effect recorder");
- return;
- }
- } else {
- initializeRecorder();
- if (mMediaRecorder == null) {
- Log.e(TAG, "Fail to initialize media recorder");
- return;
- }
- }
-
- pauseAudioPlayback();
-
- if (effectsActive()) {
- try {
- mEffectsRecorder.startRecording();
- } catch (RuntimeException e) {
- Log.e(TAG, "Could not start effects recorder. ", e);
- releaseEffectsRecorder();
- return;
- }
- } else {
- try {
- mMediaRecorder.start(); // Recording is now started
- } catch (RuntimeException e) {
- Log.e(TAG, "Could not start media recorder. ", e);
- releaseMediaRecorder();
- // If start fails, frameworks will not lock the camera for us.
- mCameraDevice.lock();
- return;
- }
- }
-
- // Make sure the video recording has started before announcing
- // this in accessibility.
- AccessibilityUtils.makeAnnouncement(mUI.getShutterButton(),
- mActivity.getString(R.string.video_recording_started));
-
- // The parameters might have been altered by MediaRecorder already.
- // We need to force mCameraDevice to refresh before getting it.
- mCameraDevice.refreshParameters();
- // The parameters may have been changed by MediaRecorder upon starting
- // recording. We need to alter the parameters if we support camcorder
- // zoom. To reduce latency when setting the parameters during zoom, we
- // update mParameters here once.
- if (ApiHelper.HAS_ZOOM_WHEN_RECORDING) {
- mParameters = mCameraDevice.getParameters();
- }
-
- mUI.enableCameraControls(false);
-
- mMediaRecorderRecording = true;
- mOrientationManager.lockOrientation();
- mRecordingStartTime = SystemClock.uptimeMillis();
- mUI.showRecordingUI(true, mParameters.isZoomSupported());
-
- updateRecordingTime();
- keepScreenOn();
- UsageStatistics.onEvent(UsageStatistics.COMPONENT_CAMERA,
- UsageStatistics.ACTION_CAPTURE_START, "Video");
- }
-
- private void showCaptureResult() {
- mIsInReviewMode = true;
- Bitmap bitmap = null;
- if (mVideoFileDescriptor != null) {
- bitmap = Thumbnail.createVideoThumbnailBitmap(mVideoFileDescriptor.getFileDescriptor(),
- mDesiredPreviewWidth);
- } else if (mCurrentVideoFilename != null) {
- bitmap = Thumbnail.createVideoThumbnailBitmap(mCurrentVideoFilename,
- mDesiredPreviewWidth);
- }
- if (bitmap != null) {
- // MetadataRetriever already rotates the thumbnail. We should rotate
- // it to match the UI orientation (and mirror if it is front-facing camera).
- CameraInfo[] info = CameraHolder.instance().getCameraInfo();
- boolean mirror = (info[mCameraId].facing == CameraInfo.CAMERA_FACING_FRONT);
- bitmap = Util.rotateAndMirror(bitmap, 0, mirror);
- mUI.showReviewImage(bitmap);
- }
-
- mUI.showReviewControls();
- mUI.enableCameraControls(false);
- mUI.showTimeLapseUI(false);
- }
-
- private void hideAlert() {
- mUI.enableCameraControls(true);
- mUI.hideReviewUI();
- if (mCaptureTimeLapse) {
- mUI.showTimeLapseUI(true);
- }
- }
-
- private boolean stopVideoRecording() {
- Log.v(TAG, "stopVideoRecording");
- mUI.setSwipingEnabled(true);
- mUI.showSwitcher();
-
- boolean fail = false;
- if (mMediaRecorderRecording) {
- boolean shouldAddToMediaStoreNow = false;
-
- try {
- if (effectsActive()) {
- // This is asynchronous, so we can't add to the media store now because the
- // thumbnail may not be ready yet. In that case saveVideo() is called later
- // through a callback from the MediaEncoderFilter to EffectsRecorder,
- // and then to the VideoModule.
- mEffectsRecorder.stopRecording();
- } else {
- mMediaRecorder.setOnErrorListener(null);
- mMediaRecorder.setOnInfoListener(null);
- mMediaRecorder.stop();
- shouldAddToMediaStoreNow = true;
- }
- mCurrentVideoFilename = mVideoFilename;
- Log.v(TAG, "stopVideoRecording: Setting current video filename: "
- + mCurrentVideoFilename);
- AccessibilityUtils.makeAnnouncement(mUI.getShutterButton(),
- mActivity.getString(R.string.video_recording_stopped));
- } catch (RuntimeException e) {
- Log.e(TAG, "stop fail", e);
- if (mVideoFilename != null) deleteVideoFile(mVideoFilename);
- fail = true;
- }
- mMediaRecorderRecording = false;
- mOrientationManager.unlockOrientation();
-
- // If the activity is paused, it was interrupted during recording.
- // Release the camera as soon as possible because face unlock or other
- // applications may need to use it.
- // However, if the effects are active, we can only release the camera
- // and cannot release the effects recorder, since that would stop the
- // graph. It is possible to separate the camera release from the effects
- // release; however, the effects recorder holds on to the camera, so it
- // needs to be "disconnected" from the camera in the closeCamera call.
- if (mPaused) {
- // Closing only the camera part if effects active. Effects will
- // be closed in the callback from effects.
- boolean closeEffects = !effectsActive();
- closeCamera(closeEffects);
- }
-
- mUI.showRecordingUI(false, mParameters.isZoomSupported());
- if (!mIsVideoCaptureIntent) {
- mUI.enableCameraControls(true);
- }
- // The orientation was fixed during video recording. Now make it
- // reflect the device orientation as video recording is stopped.
- mUI.setOrientationIndicator(0, true);
- keepScreenOnAwhile();
- if (shouldAddToMediaStoreNow) {
- saveVideo();
- }
- }
- // Always release the media recorder if no effects are running.
- if (!effectsActive()) {
- releaseMediaRecorder();
- if (!mPaused) {
- mCameraDevice.lock();
- mCameraDevice.waitDone();
- if (!ApiHelper.HAS_SURFACE_TEXTURE_RECORDING) {
- stopPreview();
- mUI.hideSurfaceView();
- // Switch back to use SurfaceTexture for preview.
- startPreview();
- }
- }
- }
- // Update the parameters here because the parameters might have been altered
- // by MediaRecorder.
- if (!mPaused) mParameters = mCameraDevice.getParameters();
- UsageStatistics.onEvent(UsageStatistics.COMPONENT_CAMERA,
- fail ? UsageStatistics.ACTION_CAPTURE_FAIL :
- UsageStatistics.ACTION_CAPTURE_DONE, "Video",
- SystemClock.uptimeMillis() - mRecordingStartTime);
- return fail;
- }
-
- private void resetScreenOn() {
- mHandler.removeMessages(CLEAR_SCREEN_DELAY);
- mActivity.getWindow().clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
- }
-
- private void keepScreenOnAwhile() {
- mHandler.removeMessages(CLEAR_SCREEN_DELAY);
- mActivity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
- mHandler.sendEmptyMessageDelayed(CLEAR_SCREEN_DELAY, SCREEN_DELAY);
- }
-
- private void keepScreenOn() {
- mHandler.removeMessages(CLEAR_SCREEN_DELAY);
- mActivity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
- }
-
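- // Formats a duration as [hh:]mm:ss or [hh:]mm:ss.cc with zero-padded fields;
- // e.g. 75400 ms becomes "01:15", or "01:15.40" with centiseconds enabled.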
- private static String millisecondToTimeString(long milliSeconds, boolean displayCentiSeconds) {
- long seconds = milliSeconds / 1000; // round down to compute seconds
- long minutes = seconds / 60;
- long hours = minutes / 60;
- long remainderMinutes = minutes - (hours * 60);
- long remainderSeconds = seconds - (minutes * 60);
-
- StringBuilder timeStringBuilder = new StringBuilder();
-
- // Hours
- if (hours > 0) {
- if (hours < 10) {
- timeStringBuilder.append('0');
- }
- timeStringBuilder.append(hours);
-
- timeStringBuilder.append(':');
- }
-
- // Minutes
- if (remainderMinutes < 10) {
- timeStringBuilder.append('0');
- }
- timeStringBuilder.append(remainderMinutes);
- timeStringBuilder.append(':');
-
- // Seconds
- if (remainderSeconds < 10) {
- timeStringBuilder.append('0');
- }
- timeStringBuilder.append(remainderSeconds);
-
- // Centiseconds
- if (displayCentiSeconds) {
- timeStringBuilder.append('.');
- long remainderCentiSeconds = (milliSeconds - seconds * 1000) / 10;
- if (remainderCentiSeconds < 10) {
- timeStringBuilder.append('0');
- }
- timeStringBuilder.append(remainderCentiSeconds);
- }
-
- return timeStringBuilder.toString();
- }
-
- private long getTimeLapseVideoLength(long deltaMs) {
- // For better approximation calculate fractional number of frames captured.
- // This will update the video time at a higher resolution.
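- // Example: with a 500 ms capture interval and a 30 fps profile, 10000 ms of
- // wall clock time yields 20 captured frames, i.e. roughly 0.67 s of video.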
- double numberOfFrames = (double) deltaMs / mTimeBetweenTimeLapseFrameCaptureMs;
- return (long) (numberOfFrames / mProfile.videoFrameRate * 1000);
- }
-
- private void updateRecordingTime() {
- if (!mMediaRecorderRecording) {
- return;
- }
- long now = SystemClock.uptimeMillis();
- long delta = now - mRecordingStartTime;
-
- // Starting a minute before reaching the max duration
- // limit, we'll count down the remaining time instead.
- boolean countdownRemainingTime = (mMaxVideoDurationInMs != 0
- && delta >= mMaxVideoDurationInMs - 60000);
-
- long deltaAdjusted = delta;
- if (countdownRemainingTime) {
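- // Adding 999 ms makes the whole-second display round the remaining time
- // up instead of down.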
- deltaAdjusted = Math.max(0, mMaxVideoDurationInMs - deltaAdjusted) + 999;
- }
- String text;
-
- long targetNextUpdateDelay;
- if (!mCaptureTimeLapse) {
- text = millisecondToTimeString(deltaAdjusted, false);
- targetNextUpdateDelay = 1000;
- } else {
- // The length of a time lapse video differs from the actual wall clock
- // time elapsed. Display the video length in the format hh:mm:ss.dd,
- // where dd are the centiseconds.
- text = millisecondToTimeString(getTimeLapseVideoLength(delta), true);
- targetNextUpdateDelay = mTimeBetweenTimeLapseFrameCaptureMs;
- }
-
- mUI.setRecordingTime(text);
-
- if (mRecordingTimeCountsDown != countdownRemainingTime) {
- // Avoid setting the color on every update, do it only
- // when it needs changing.
- mRecordingTimeCountsDown = countdownRemainingTime;
-
- int color = mActivity.getResources().getColor(countdownRemainingTime
- ? R.color.recording_time_remaining_text
- : R.color.recording_time_elapsed_text);
-
- mUI.setRecordingTimeTextColor(color);
- }
-
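- // Schedule the next update so it lands on the next whole second (or frame
- // interval); e.g. at delta == 3250 ms with a 1000 ms target, the next tick
- // fires in 750 ms.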
- long actualNextUpdateDelay = targetNextUpdateDelay - (delta % targetNextUpdateDelay);
- mHandler.sendEmptyMessageDelayed(
- UPDATE_RECORD_TIME, actualNextUpdateDelay);
- }
-
- private static boolean isSupported(String value, List<String> supported) {
- return supported == null ? false : supported.indexOf(value) >= 0;
- }
-
- @SuppressWarnings("deprecation")
- private void setCameraParameters() {
- mParameters.setPreviewSize(mDesiredPreviewWidth, mDesiredPreviewHeight);
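- // Prefer an explicit preview fps range when the driver reports one; otherwise
- // fall back to the deprecated fixed preview frame rate from the profile.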
- int[] fpsRange = Util.getMaxPreviewFpsRange(mParameters);
- if (fpsRange.length > 0) {
- mParameters.setPreviewFpsRange(
- fpsRange[Parameters.PREVIEW_FPS_MIN_INDEX],
- fpsRange[Parameters.PREVIEW_FPS_MAX_INDEX]);
- } else {
- mParameters.setPreviewFrameRate(mProfile.videoFrameRate);
- }
-
- // Set flash mode.
- String flashMode;
- if (mUI.isVisible()) {
- flashMode = mPreferences.getString(
- CameraSettings.KEY_VIDEOCAMERA_FLASH_MODE,
- mActivity.getString(R.string.pref_camera_video_flashmode_default));
- } else {
- flashMode = Parameters.FLASH_MODE_OFF;
- }
- List<String> supportedFlash = mParameters.getSupportedFlashModes();
- if (isSupported(flashMode, supportedFlash)) {
- mParameters.setFlashMode(flashMode);
- } else {
- flashMode = mParameters.getFlashMode();
- if (flashMode == null) {
- flashMode = mActivity.getString(
- R.string.pref_camera_flashmode_no_flash);
- }
- }
-
- // Set white balance parameter.
- String whiteBalance = mPreferences.getString(
- CameraSettings.KEY_WHITE_BALANCE,
- mActivity.getString(R.string.pref_camera_whitebalance_default));
- if (isSupported(whiteBalance,
- mParameters.getSupportedWhiteBalance())) {
- mParameters.setWhiteBalance(whiteBalance);
- } else {
- whiteBalance = mParameters.getWhiteBalance();
- if (whiteBalance == null) {
- whiteBalance = Parameters.WHITE_BALANCE_AUTO;
- }
- }
-
- // Set zoom.
- if (mParameters.isZoomSupported()) {
- mParameters.setZoom(mZoomValue);
- }
-
- // Set continuous autofocus.
- List<String> supportedFocus = mParameters.getSupportedFocusModes();
- if (isSupported(Parameters.FOCUS_MODE_CONTINUOUS_VIDEO, supportedFocus)) {
- mParameters.setFocusMode(Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
- }
-
- mParameters.set(Util.RECORDING_HINT, Util.TRUE);
-
- // Enable video stabilization. Convenience methods are not available
- // in API level <= 14.
- String vstabSupported = mParameters.get("video-stabilization-supported");
- if ("true".equals(vstabSupported)) {
- mParameters.set("video-stabilization", "true");
- }
-
- // Set picture size.
- // The logic here is different from the logic in the still-mode camera.
- // There we determine the preview size based on the picture size, but
- // here we determine the picture size based on the preview size.
- List<Size> supported = mParameters.getSupportedPictureSizes();
- Size optimalSize = Util.getOptimalVideoSnapshotPictureSize(supported,
- (double) mDesiredPreviewWidth / mDesiredPreviewHeight);
- Size original = mParameters.getPictureSize();
- if (!original.equals(optimalSize)) {
- mParameters.setPictureSize(optimalSize.width, optimalSize.height);
- }
- Log.v(TAG, "Video snapshot size is " + optimalSize.width + "x" +
- optimalSize.height);
-
- // Set JPEG quality.
- int jpegQuality = CameraProfile.getJpegEncodingQualityParameter(mCameraId,
- CameraProfile.QUALITY_HIGH);
- mParameters.setJpegQuality(jpegQuality);
-
- mCameraDevice.setParameters(mParameters);
- // Keep preview size up to date.
- mParameters = mCameraDevice.getParameters();
- }
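// Standalone sketch (not from the original module) of the idea behind
// Util.getOptimalVideoSnapshotPictureSize(): among the supported picture
// sizes, pick the one whose aspect ratio is closest to the preview's,
// preferring the larger area on a tie. The helper and values below are
// hypothetical; the real selection logic lives in Util.java.
public class SnapshotSizeSketch {
    // Each size is {width, height}.
    static int[] pickClosestAspect(int[][] supported, double targetRatio) {
        int[] best = null;
        double bestDiff = Double.MAX_VALUE;
        for (int[] s : supported) {
            double diff = Math.abs((double) s[0] / s[1] - targetRatio);
            boolean better = diff < bestDiff
                    || (diff == bestDiff && best != null
                        && (long) s[0] * s[1] > (long) best[0] * best[1]);
            if (better) {
                best = s;
                bestDiff = diff;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        int[][] sizes = { {640, 480}, {1280, 720}, {1920, 1080} };
        int[] picked = pickClosestAspect(sizes, 1920.0 / 1080.0);
        System.out.println(picked[0] + "x" + picked[1]); // prints 1920x1080
    }
}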
-
- @Override
- public void onActivityResult(int requestCode, int resultCode, Intent data) {
- switch (requestCode) {
- case REQUEST_EFFECT_BACKDROPPER:
- if (resultCode == Activity.RESULT_OK) {
- // onActivityResult() runs before onResume(), so this parameter will be
- // seen by startPreview() from onResume().
- mEffectUriFromGallery = data.getData().toString();
- Log.v(TAG, "Received URI from gallery: " + mEffectUriFromGallery);
- mResetEffect = false;
- } else {
- mEffectUriFromGallery = null;
- Log.w(TAG, "No URI from gallery");
- mResetEffect = true;
- }
- break;
- }
- }
-
- @Override
- public void onEffectsUpdate(int effectId, int effectMsg) {
- Log.v(TAG, "onEffectsUpdate. Effect Message = " + effectMsg);
- if (effectMsg == EffectsRecorder.EFFECT_MSG_EFFECTS_STOPPED) {
- // Effects have shut down. Hide the learning message, if any,
- // and restart the regular preview.
- checkQualityAndStartPreview();
- } else if (effectMsg == EffectsRecorder.EFFECT_MSG_RECORDING_DONE) {
- // This follows the codepath from onStopVideoRecording.
- if (mEffectsDisplayResult) {
- saveVideo();
- if (mIsVideoCaptureIntent) {
- if (mQuickCapture) {
- doReturnToCaller(true);
- } else {
- showCaptureResult();
- }
- }
- }
- mEffectsDisplayResult = false;
- // In onPause(), these were not called if the effects were active. We
- // had to wait until the effects recording completed to do this.
- if (mPaused) {
- closeVideoFileDescriptor();
- }
- } else if (effectMsg == EffectsRecorder.EFFECT_MSG_PREVIEW_RUNNING) {
- // Enable the shutter button once the preview is complete.
- mUI.enableShutter(true);
- }
- // In onPause(), this was not called if the effects were active. We had
- // to wait until the effects completed to do this.
- if (mPaused) {
- Log.v(TAG, "OnEffectsUpdate: closing effects if activity paused");
- closeEffects();
- }
- }
-
- public void onCancelBgTraining(View v) {
- // Write default effect out to shared prefs
- writeDefaultEffectToPrefs();
- // Tell VideoCamera to re-init based on the new shared pref values.
- onSharedPreferenceChanged();
- }
-
- @Override
- public synchronized void onEffectsError(Exception exception, String fileName) {
- // TODO: Eventually we may want to show the user an error dialog, and then restart the
- // camera and encoder gracefully. For now, we just delete the file and bail out.
- if (fileName != null && new File(fileName).exists()) {
- deleteVideoFile(fileName);
- }
- try {
- if (Class.forName("android.filterpacks.videosink.MediaRecorderStopException")
- .isInstance(exception)) {
- Log.w(TAG, "Problem recoding video file. Removing incomplete file.");
- return;
- }
- } catch (ClassNotFoundException ex) {
- Log.w(TAG, ex);
- }
- throw new RuntimeException("Error during recording!", exception);
- }
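// Standalone sketch (not from the original module) of the reflection pattern
// used in onEffectsError(): testing whether an exception is an instance of a
// class that may not be on the compile-time classpath, without importing it
// directly. java.io.IOException is used here only so the snippet runs
// standalone; the original checks a filterpacks class by name.
public class ReflectiveInstanceCheckSketch {
    static boolean isInstanceOf(Throwable t, String className) {
        try {
            return Class.forName(className).isInstance(t);
        } catch (ClassNotFoundException e) {
            return false; // the named class is not available at runtime
        }
    }

    public static void main(String[] args) {
        Throwable t = new java.io.FileNotFoundException("missing");
        System.out.println(isInstanceOf(t, "java.io.IOException"));     // true
        System.out.println(isInstanceOf(t, "com.example.NoSuchClass")); // false
    }
}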
-
- @Override
- public void onConfigurationChanged(Configuration newConfig) {
- Log.v(TAG, "onConfigurationChanged");
- setDisplayOrientation();
- }
-
- @Override
- public void onOverriddenPreferencesClicked() {
- }
-
- @Override
- // TODO: Delete this after old camera code is removed
- public void onRestorePreferencesClicked() {
- }
-
- private boolean effectsActive() {
- return (mEffectType != EffectsRecorder.EFFECT_NONE);
- }
-
- @Override
- public void onSharedPreferenceChanged() {
- // Ignore events after onPause() or before the preview has started.
- if (mPaused) return;
- synchronized (mPreferences) {
- // If mCameraDevice is not ready yet, the parameters will be set in
- // startPreview().
- if (mCameraDevice == null) return;
-
- boolean recordLocation = RecordLocationPreference.get(
- mPreferences, mContentResolver);
- mLocationManager.recordLocation(recordLocation);
-
- // Check if the current effects selection has changed
- if (updateEffectSelection()) return;
-
- readVideoPreferences();
- mUI.showTimeLapseUI(mCaptureTimeLapse);
- // We need to restart the preview if the preview size has changed.
- Size size = mParameters.getPreviewSize();
- if (size.width != mDesiredPreviewWidth
- || size.height != mDesiredPreviewHeight) {
- if (!effectsActive()) {
- stopPreview();
- } else {
- mEffectsRecorder.release();
- mEffectsRecorder = null;
- }
- resizeForPreviewAspectRatio();
- startPreview(); // Parameters will be set in startPreview().
- } else {
- setCameraParameters();
- }
- mUI.updateOnScreenIndicators(mParameters, mPreferences);
- }
- }
-
- protected void setCameraId(int cameraId) {
- ListPreference pref = mPreferenceGroup.findPreference(CameraSettings.KEY_CAMERA_ID);
- pref.setValue("" + cameraId);
- }
-
- private void switchCamera() {
- if (mPaused) return;
-
- Log.d(TAG, "Start to switch camera.");
- mCameraId = mPendingSwitchCameraId;
- mPendingSwitchCameraId = -1;
- setCameraId(mCameraId);
-
- closeCamera();
- mUI.collapseCameraControls();
- // Restart the camera and initialize the UI. From onCreate.
- mPreferences.setLocalId(mActivity, mCameraId);
- CameraSettings.upgradeLocalPreferences(mPreferences.getLocal());
- openCamera();
- readVideoPreferences();
- startPreview();
- initializeVideoSnapshot();
- resizeForPreviewAspectRatio();
- initializeVideoControl();
-
- // From onResume
- mZoomValue = 0;
- mUI.initializeZoom(mParameters);
- mUI.setOrientationIndicator(0, false);
-
- // Start the switch-camera animation. Post a message because
- // onFrameAvailable events from the old camera may still be pending.
- mHandler.sendEmptyMessage(SWITCH_CAMERA_START_ANIMATION);
- mUI.updateOnScreenIndicators(mParameters, mPreferences);
- }
-
- // The preview texture has been copied. Now the camera can be released
- // and the animation can be started.
- @Override
- public void onPreviewTextureCopied() {
- mHandler.sendEmptyMessage(SWITCH_CAMERA);
- }
-
- @Override
- public void onCaptureTextureCopied() {
- }
-
- private boolean updateEffectSelection() {
- int previousEffectType = mEffectType;
- Object previousEffectParameter = mEffectParameter;
- mEffectType = CameraSettings.readEffectType(mPreferences);
- mEffectParameter = CameraSettings.readEffectParameter(mPreferences);
-
- if (mEffectType == previousEffectType) {
- if (mEffectType == EffectsRecorder.EFFECT_NONE) return false;
- if (mEffectParameter.equals(previousEffectParameter)) return false;
- }
- Log.v(TAG