-rw-r--r--  camera/ANativeWindowDisplayAdapter.cpp | 109
-rw-r--r--  camera/AppCallbackNotifier.cpp | 151
-rw-r--r--  camera/BaseCameraAdapter.cpp | 62
-rw-r--r--  camera/BufferSourceAdapter.cpp | 44
-rw-r--r--  camera/CameraHal.cpp | 482
-rw-r--r--  camera/CameraHalCommon.cpp | 8
-rw-r--r--  camera/CameraHalUtilClasses.cpp | 16
-rw-r--r--  camera/CameraHal_Module.cpp | 51
-rw-r--r--  camera/CameraParameters.cpp | 14
-rw-r--r--  camera/CameraProperties.cpp | 8
-rw-r--r--  camera/Encoder_libjpeg.cpp | 17
-rw-r--r--  camera/MemoryManager.cpp | 6
-rw-r--r--  camera/OMXCameraAdapter/OMX3A.cpp | 123
-rw-r--r--  camera/OMXCameraAdapter/OMXAlgo.cpp | 38
-rw-r--r--  camera/OMXCameraAdapter/OMXCameraAdapter.cpp | 240
-rw-r--r--  camera/OMXCameraAdapter/OMXCapabilities.cpp | 78
-rw-r--r--  camera/OMXCameraAdapter/OMXCapture.cpp | 118
-rw-r--r--  camera/OMXCameraAdapter/OMXDccDataSave.cpp | 12
-rw-r--r--  camera/OMXCameraAdapter/OMXDefaults.cpp | 16
-rw-r--r--  camera/OMXCameraAdapter/OMXExif.cpp | 22
-rw-r--r--  camera/OMXCameraAdapter/OMXFD.cpp | 20
-rw-r--r--  camera/OMXCameraAdapter/OMXFocus.cpp | 42
-rw-r--r--  camera/OMXCameraAdapter/OMXMetadata.cpp | 52
-rw-r--r--  camera/OMXCameraAdapter/OMXReprocess.cpp | 26
-rw-r--r--  camera/OMXCameraAdapter/OMXZoom.cpp | 18
-rw-r--r--  camera/SensorListener.cpp | 36
-rw-r--r--  camera/TICameraParameters.cpp | 7
-rw-r--r--  camera/V4LCameraAdapter/V4LCameraAdapter.cpp | 22
-rw-r--r--  camera/V4LCameraAdapter/V4LCapabilities.cpp | 16
-rw-r--r--  camera/inc/ANativeWindowDisplayAdapter.h | 25
-rw-r--r--  camera/inc/BaseCameraAdapter.h | 75
-rw-r--r--  camera/inc/BufferSourceAdapter.h | 34
-rw-r--r--  camera/inc/CameraHal.h | 100
-rw-r--r--  camera/inc/CameraProperties.h | 10
-rw-r--r--  camera/inc/Common.h | 1
-rw-r--r--  camera/inc/Encoder_libjpeg.h | 16
-rw-r--r--  camera/inc/General3A_Settings.h | 100
-rw-r--r--  camera/inc/OMXCameraAdapter/OMXCameraAdapter.h | 160
-rw-r--r--  camera/inc/OMXCameraAdapter/OMXSceneModeTables.h | 6
-rw-r--r--  camera/inc/SensorListener.h | 24
-rw-r--r--  camera/inc/TICameraParameters.h | 6
-rw-r--r--  camera/inc/V4LCameraAdapter/V4LCameraAdapter.h | 33
-rw-r--r--  libtiutils/DebugUtils.cpp | 2
-rw-r--r--  libtiutils/ErrorUtils.cpp | 9
-rw-r--r--  libtiutils/ErrorUtils.h | 8
-rw-r--r--  libtiutils/MessageQueue.cpp | 6
-rw-r--r--  libtiutils/MessageQueue.h | 18
-rw-r--r--  libtiutils/Semaphore.cpp | 8
-rw-r--r--  libtiutils/Semaphore.h | 8
-rw-r--r--  libtiutils/Status.h | 67
-rw-r--r--  libtiutils/UtilsCommon.h | 4
51 files changed, 1376 insertions(+), 1198 deletions(-)
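
The diff below moves every HAL class out of the top-level android namespace and into Ti::Camera, references the libtiutils helpers as Utils:: instead of TIUTILS::, and fully qualifies framework types such as android::CameraParameters and android::AutoMutex. The following is a minimal, self-contained sketch of that layout only — ExampleAdapter is a hypothetical class, and the framework type is stubbed so the snippet compiles on its own; it is not code from the patch.

```cpp
#include <cstdio>

namespace android {                 // stand-in for the Android framework namespace
struct CameraParameters {
    static const char PIXEL_FORMAT_YUV420SP[];
};
const char CameraParameters::PIXEL_FORMAT_YUV420SP[] = "yuv420sp";
} // namespace android

namespace Ti {
namespace Camera {                  // HAL classes now live here, not in ::android

class ExampleAdapter {              // hypothetical, for illustration only
public:
    // Framework types are spelled with an explicit android:: prefix,
    // mirroring the android::CameraParameters usage throughout the patch.
    const char* defaultPreviewFormat() const {
        return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
    }
};

} // namespace Camera
} // namespace Ti

int main() {
    Ti::Camera::ExampleAdapter adapter;
    std::printf("default preview format: %s\n", adapter.defaultPreviewFormat());
    return 0;
}
```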
diff --git a/camera/ANativeWindowDisplayAdapter.cpp b/camera/ANativeWindowDisplayAdapter.cpp
index 95914a5..7fe574a 100644
--- a/camera/ANativeWindowDisplayAdapter.cpp
+++ b/camera/ANativeWindowDisplayAdapter.cpp
@@ -20,7 +20,8 @@
#include <ui/GraphicBufferMapper.h>
#include <hal_public.h>
-namespace android {
+namespace Ti {
+namespace Camera {
///Constant declarations
///@todo Check the time units
@@ -36,17 +37,17 @@ OMX_COLOR_FORMATTYPE toOMXPixFormat(const char* parameters_format)
if ( parameters_format != NULL )
{
- if (strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
{
CAMHAL_LOGDA("CbYCrY format selected");
pixFormat = OMX_COLOR_FormatCbYCrY;
}
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
{
CAMHAL_LOGDA("YUV420SP format selected");
pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
}
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0)
{
CAMHAL_LOGDA("RGB565 format selected");
pixFormat = OMX_COLOR_Format16bitRGB565;
@@ -71,38 +72,38 @@ const char* DisplayAdapter::getPixFormatConstant(const char* parameters_format)
if ( parameters_format != NULL )
{
- if (strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
{
CAMHAL_LOGVA("CbYCrY format selected");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV422I;
+ pixFormat = android::CameraParameters::PIXEL_FORMAT_YUV422I;
}
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
- strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0)
{
// TODO(XXX): We are treating YV12 the same as YUV420SP
CAMHAL_LOGVA("YUV420SP format selected");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP;
+ pixFormat = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
}
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0)
{
CAMHAL_LOGVA("RGB565 format selected");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_RGB565;
+ pixFormat = android::CameraParameters::PIXEL_FORMAT_RGB565;
}
- else if(strcmp(parameters_format, (const char *) CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0)
+ else if(strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0)
{
CAMHAL_LOGVA("BAYER format selected");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
+ pixFormat = android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
}
else
{
CAMHAL_LOGEA("Invalid format, NV12 format selected as default");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP;
+ pixFormat = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
}
}
else
{
CAMHAL_LOGEA("Preview format is NULL, defaulting to NV12");
- pixFormat = (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP;
+ pixFormat = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
}
return pixFormat;
@@ -114,19 +115,19 @@ size_t DisplayAdapter::getBufSize(const char* parameters_format, int width, int
if ( parameters_format != NULL ) {
if (strcmp(parameters_format,
- (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
buf_size = width * height * 2;
}
- else if((strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) ||
- (strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420P) == 0)) {
+ else if((strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) ||
+ (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0)) {
buf_size = width * height * 3 / 2;
}
else if(strcmp(parameters_format,
- (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
buf_size = width * height * 2;
}
else if (strcmp(parameters_format,
- (const char *) CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
buf_size = width * height * 2;
} else {
CAMHAL_LOGEA("Invalid format");
@@ -191,8 +192,8 @@ ANativeWindowDisplayAdapter::ANativeWindowDisplayAdapter():mDisplayThread(NULL),
ANativeWindowDisplayAdapter::~ANativeWindowDisplayAdapter()
{
- Semaphore sem;
- TIUTILS::Message msg;
+ Utils::Semaphore sem;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -248,7 +249,7 @@ status_t ANativeWindowDisplayAdapter::initialize()
}
///Start the display thread
- status_t ret = mDisplayThread->run("DisplayThread", PRIORITY_URGENT_DISPLAY);
+ status_t ret = mDisplayThread->run("DisplayThread", android::PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
CAMHAL_LOGEA("Couldn't run display thread");
@@ -344,7 +345,7 @@ status_t ANativeWindowDisplayAdapter::setSnapshotTimeRef(struct timeval *refTime
if ( NULL != refTime )
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
memcpy(&mStartCapture, refTime, sizeof(struct timeval));
}
@@ -358,8 +359,8 @@ status_t ANativeWindowDisplayAdapter::setSnapshotTimeRef(struct timeval *refTime
int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct timeval *refTime)
{
- Semaphore sem;
- TIUTILS::Message msg;
+ Utils::Semaphore sem;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -375,7 +376,7 @@ int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct tim
if ( NULL != refTime )
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
memcpy(&mStandbyToShot, refTime, sizeof(struct timeval));
mMeasureStandby = true;
}
@@ -413,7 +414,7 @@ int ANativeWindowDisplayAdapter::enableDisplay(int width, int height, struct tim
int ANativeWindowDisplayAdapter::disableDisplay(bool cancel_buffer)
{
status_t ret = NO_ERROR;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
LOG_FUNCTION_NAME;
@@ -433,9 +434,9 @@ int ANativeWindowDisplayAdapter::disableDisplay(bool cancel_buffer)
{
//Send STOP_DISPLAY COMMAND to display thread. Display thread will stop and dequeue all messages
// and then wait for message
- Semaphore sem;
+ Utils::Semaphore sem;
sem.Create();
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = DisplayThread::DISPLAY_STOP;
// Send the semaphore to signal once the command is completed
@@ -450,7 +451,7 @@ int ANativeWindowDisplayAdapter::disableDisplay(bool cancel_buffer)
}
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
{
///Reset the display enabled flag
mDisplayEnabled = false;
@@ -494,7 +495,7 @@ status_t ANativeWindowDisplayAdapter::pauseDisplay(bool pause)
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mPaused = pause;
}
@@ -528,8 +529,8 @@ CameraBuffer* ANativeWindowDisplayAdapter::allocateBufferList(int width, int hei
int i = -1;
const int lnumBufs = numBufs;
int undequeued = 0;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
- Rect bounds;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds;
mBuffers = new CameraBuffer [lnumBufs];
memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
@@ -840,7 +841,7 @@ status_t ANativeWindowDisplayAdapter::returnBuffersToWindow()
{
status_t ret = NO_ERROR;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
//Give the buffers back to display here - sort of free it
if (mANativeWindow)
for(unsigned int i = 0; i < mFramesWithCameraAdapterMap.size(); i++) {
@@ -884,7 +885,7 @@ int ANativeWindowDisplayAdapter::freeBufferList(CameraBuffer * buflist)
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if(mBuffers != buflist)
{
@@ -942,7 +943,7 @@ void ANativeWindowDisplayAdapter::displayThread()
while(shouldLive)
{
- ret = TIUTILS::MessageQueue::waitForMsg(&mDisplayThread->msgQ()
+ ret = Utils::MessageQueue::waitForMsg(&mDisplayThread->msgQ()
, &mDisplayQ
, NULL
, ANativeWindowDisplayAdapter::DISPLAY_TIMEOUT);
@@ -964,7 +965,7 @@ void ANativeWindowDisplayAdapter::displayThread()
}
else
{
- TIUTILS::Message msg;
+ Utils::Message msg;
///Get the dummy msg from the displayQ
if(mDisplayQ.get(&msg)!=NO_ERROR)
{
@@ -995,7 +996,7 @@ void ANativeWindowDisplayAdapter::displayThread()
bool ANativeWindowDisplayAdapter::processHalMsg()
{
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -1024,7 +1025,7 @@ bool ANativeWindowDisplayAdapter::processHalMsg()
// flush frame message queue
while ( !mDisplayQ.isEmpty() ) {
- TIUTILS::Message message;
+ Utils::Message message;
mDisplayQ.get(&message);
}
@@ -1054,7 +1055,7 @@ bool ANativeWindowDisplayAdapter::processHalMsg()
{
CAMHAL_LOGDA("+Signalling display semaphore");
- Semaphore &sem = *((Semaphore*)msg.arg1);
+ Utils::Semaphore &sem = *((Utils::Semaphore*)msg.arg1);
sem.Signal();
@@ -1072,7 +1073,7 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
status_t ret = NO_ERROR;
uint32_t actualFramesWithDisplay = 0;
android_native_buffer_t *buffer = NULL;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
int i;
///@todo Do cropping based on the stabilized frame coordinates
@@ -1104,7 +1105,7 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
(!mPaused || CameraFrame::CameraFrame::SNAPSHOT_FRAME == dispFrame.mType) &&
!mSuspend)
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
uint32_t xOff = (dispFrame.mOffset% PAGE_SIZE);
uint32_t yOff = (dispFrame.mOffset / PAGE_SIZE);
@@ -1114,15 +1115,15 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
CAMHAL_LOGDB("Offset %d xOff = %d, yOff = %d", dispFrame.mOffset, xOff, yOff);
uint8_t bytesPerPixel;
///Calculate bytes per pixel based on the pixel format
- if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
+ if(strcmp(mPixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0)
{
bytesPerPixel = 2;
}
- else if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0)
+ else if(strcmp(mPixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0)
{
bytesPerPixel = 2;
}
- else if(strcmp(mPixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
+ else if(strcmp(mPixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0)
{
bytesPerPixel = 1;
}
@@ -1158,7 +1159,7 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
// HWComposer has not minimum buffer requirement. We should be able to dequeue
// the buffer immediately
- TIUTILS::Message msg;
+ Utils::Message msg;
mDisplayQ.put(&msg);
@@ -1184,7 +1185,7 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
}
else
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
// unlock buffer before giving it up
@@ -1198,7 +1199,7 @@ status_t ANativeWindowDisplayAdapter::PostFrame(ANativeWindowDisplayAdapter::Dis
mFramesWithCameraAdapterMap.removeItem((buffer_handle_t *) dispFrame.mBuffer->opaque);
- TIUTILS::Message msg;
+ Utils::Message msg;
mDisplayQ.put(&msg);
ret = NO_ERROR;
}
@@ -1214,8 +1215,8 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
int i = 0;
unsigned int k;
int stride; // dummy variable to get stride
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
- Rect bounds;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds;
void *y_uv[2];
// TODO(XXX): Do we need to keep stride information in camera hal?
@@ -1276,7 +1277,7 @@ bool ANativeWindowDisplayAdapter::handleFrameReturn()
}
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mFramesWithCameraAdapterMap.add((buffer_handle_t *) mBuffers[i].opaque, i);
}
@@ -1337,5 +1338,5 @@ void ANativeWindowDisplayAdapter::frameCallback(CameraFrame* caFrame)
/*--------------------ANativeWindowDisplayAdapter Class ENDS here-----------------------------*/
-};
-
+} // namespace Camera
+} // namespace Ti
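
The ANativeWindowDisplayAdapter hunks above leave getBufSize()'s per-format byte arithmetic unchanged while switching to fully qualified android::CameraParameters constants. Below is a standalone sketch of that arithmetic under the assumption that the format strings can be stubbed locally as literals (so it builds without any Android headers); it is not the adapter's actual code.

```cpp
#include <cstddef>
#include <cstdio>
#include <cstring>

namespace {
// Stand-ins for the android::CameraParameters::PIXEL_FORMAT_* constants.
const char kYuv422i[]  = "yuv422i-yuyv"; // 2 bytes per pixel
const char kYuv420sp[] = "yuv420sp";     // 12 bits per pixel (NV12)
const char kRgb565[]   = "rgb565";       // 2 bytes per pixel

std::size_t bufSizeFor(const char* fmt, int width, int height) {
    if (fmt == NULL) {
        return 0;
    }
    if (std::strcmp(fmt, kYuv422i) == 0 || std::strcmp(fmt, kRgb565) == 0) {
        return static_cast<std::size_t>(width) * height * 2;
    }
    if (std::strcmp(fmt, kYuv420sp) == 0) {
        return static_cast<std::size_t>(width) * height * 3 / 2;
    }
    return 0; // unknown format, as in the error branch of the hunk above
}
} // namespace

int main() {
    std::printf("1920x1080 NV12 buffer: %zu bytes\n",
                bufSizeFor(kYuv420sp, 1920, 1080));
    return 0;
}
```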
diff --git a/camera/AppCallbackNotifier.cpp b/camera/AppCallbackNotifier.cpp
index f25f8dd..25bb04c 100644
--- a/camera/AppCallbackNotifier.cpp
+++ b/camera/AppCallbackNotifier.cpp
@@ -23,10 +23,11 @@
#include "NV12_resize.h"
#include "TICameraParameters.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const int AppCallbackNotifier::NOTIFIER_TIMEOUT = -1;
-KeyedVector<void*, sp<Encoder_libjpeg> > gEncoderQueue;
+android::KeyedVector<void*, android::sp<Encoder_libjpeg> > gEncoderQueue;
void AppCallbackNotifierEncoderCallback(void* main_jpeg,
void* thumb_jpeg,
@@ -63,14 +64,14 @@ void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, Camer
size_t jpeg_size;
uint8_t* src = NULL;
CameraBuffer *camera_buffer;
- sp<Encoder_libjpeg> encoder = NULL;
+ android::sp<Encoder_libjpeg> encoder = NULL;
LOG_FUNCTION_NAME;
camera_memory_t* picture = NULL;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if (!main_jpeg) {
goto exit;
@@ -124,7 +125,7 @@ void AppCallbackNotifier::EncoderDoneCb(void* main_jpeg, void* thumb_jpeg, Camer
if(picture && (mNotifierState==AppCallbackNotifier::NOTIFIER_STARTED) &&
(mCameraHal->msgTypeEnabled(CAMERA_MSG_COMPRESSED_IMAGE)))
{
- Mutex::Autolock lock(mBurstLock);
+ android::AutoMutex lock(mBurstLock);
#if defined(OMAP_ENHANCEMENT)
if ( mBurst )
@@ -187,7 +188,7 @@ status_t AppCallbackNotifier::initialize()
}
///Start the display thread
- status_t ret = mNotificationThread->run("NotificationThread", PRIORITY_URGENT_DISPLAY);
+ status_t ret = mNotificationThread->run("NotificationThread", android::PRIORITY_URGENT_DISPLAY);
if(ret!=NO_ERROR)
{
CAMHAL_LOGEA("Couldn't run NotificationThread");
@@ -213,7 +214,7 @@ void AppCallbackNotifier::setCallbacks(CameraHal* cameraHal,
camera_request_memory get_memory,
void *user)
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME;
@@ -229,7 +230,7 @@ void AppCallbackNotifier::setCallbacks(CameraHal* cameraHal,
void AppCallbackNotifier::setMeasurements(bool enable)
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME;
@@ -281,7 +282,7 @@ bool AppCallbackNotifier::notificationThread()
LOG_FUNCTION_NAME;
//CAMHAL_LOGDA("Notification Thread waiting for message");
- ret = TIUTILS::MessageQueue::waitForMsg(&mNotificationThread->msgQ(),
+ ret = Utils::MessageQueue::waitForMsg(&mNotificationThread->msgQ(),
&mEventQ,
&mFrameQ,
AppCallbackNotifier::NOTIFIER_TIMEOUT);
@@ -317,10 +318,10 @@ bool AppCallbackNotifier::notificationThread()
void AppCallbackNotifier::notifyEvent()
{
///Receive and send the event notifications to app
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if ( !mEventQ.hasMsg() ) {
return;
} else {
@@ -479,7 +480,7 @@ static void copy2Dto1D(void *dst,
CAMHAL_LOGVB("pixelFormat = %s; offset=%d",pixelFormat,offset);
if (pixelFormat!=NULL) {
- if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
bytesPerPixel = 2;
bufferSrc = ( unsigned char * ) y_uv[0] + offset;
uint32_t xOff = offset % stride;
@@ -524,8 +525,8 @@ static void copy2Dto1D(void *dst,
}
return;
- } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
- strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ } else if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
bytesPerPixel = 1;
bufferDst = ( unsigned char * ) dst;
bufferDstEnd = ( unsigned char * ) dst + width*height*bytesPerPixel;
@@ -550,7 +551,7 @@ static void copy2Dto1D(void *dst,
bufferSrc_UV = ( uint16_t * ) ((uint8_t*)y_uv[1] + (stride/2)*yOff + xOff);
- if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
uint16_t *bufferDst_UV;
// Step 2: UV plane: convert NV12 to NV21 by swapping U & V
@@ -598,7 +599,7 @@ static void copy2Dto1D(void *dst,
: "cc", "memory", "q0", "q1"
);
}
- } else if (strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ } else if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
uint16_t *bufferDst_U;
uint16_t *bufferDst_V;
@@ -664,7 +665,7 @@ static void copy2Dto1D(void *dst,
}
return ;
- } else if(strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
bytesPerPixel = 2;
}
}
@@ -687,7 +688,7 @@ void AppCallbackNotifier::copyAndSendPictureFrame(CameraFrame* frame, int32_t ms
// scope for lock
if (mCameraHal->msgTypeEnabled(msgType)) {
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) {
goto exit;
@@ -723,7 +724,7 @@ void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t ms
// scope for lock
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if(mNotifierState != AppCallbackNotifier::NOTIFIER_STARTED) {
goto exit;
@@ -782,7 +783,7 @@ void AppCallbackNotifier::copyAndSendPreviewFrame(CameraFrame* frame, int32_t ms
if((mNotifierState == AppCallbackNotifier::NOTIFIER_STARTED) &&
mCameraHal->msgTypeEnabled(msgType) &&
(dest != NULL) && (dest->mapped != NULL)) {
- AutoMutex locker(mLock);
+ android::AutoMutex locker(mLock);
if ( mPreviewMemory )
mDataCb(msgType, mPreviewMemory, mPreviewBufCount, NULL, mCallbackCookie);
}
@@ -828,17 +829,17 @@ status_t AppCallbackNotifier::dummyRaw()
void AppCallbackNotifier::notifyFrame()
{
///Receive and send the frame notifications to app
- TIUTILS::Message msg;
+ Utils::Message msg;
CameraFrame *frame;
- MemoryHeapBase *heap;
- MemoryBase *buffer = NULL;
- sp<MemoryBase> memBase;
+ android::MemoryHeapBase *heap;
+ android::MemoryBase *buffer = NULL;
+ android::sp<android::MemoryBase> memBase;
void *buf = NULL;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if(!mFrameQ.isEmpty()) {
mFrameQ.get(&msg);
} else {
@@ -902,17 +903,17 @@ void AppCallbackNotifier::notifyFrame()
buf = raw_picture->data;
}
- CameraParameters parameters;
+ android::CameraParameters parameters;
char *params = mCameraHal->getParameters();
- const String8 strParams(params);
+ const android::String8 strParams(params);
parameters.unflatten(strParams);
- encode_quality = parameters.getInt(CameraParameters::KEY_JPEG_QUALITY);
+ encode_quality = parameters.getInt(android::CameraParameters::KEY_JPEG_QUALITY);
if (encode_quality < 0 || encode_quality > 100) {
encode_quality = 100;
}
- tn_quality = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ tn_quality = parameters.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
if (tn_quality < 0 || tn_quality > 100) {
tn_quality = 100;
}
@@ -947,12 +948,12 @@ void AppCallbackNotifier::notifyFrame()
main_jpeg->format = TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY;
}
else { //if ( CameraFrame::FORMAT_YUV422I_YUYV & frame->mQuirks)
- main_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV422I;
+ main_jpeg->format = android::CameraParameters::PIXEL_FORMAT_YUV422I;
}
}
- tn_width = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
- tn_height = parameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ tn_width = parameters.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ tn_height = parameters.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
previewFormat = parameters.getPreviewFormat();
if ((tn_width > 0) && (tn_height > 0) && ( NULL != previewFormat )) {
@@ -981,10 +982,10 @@ void AppCallbackNotifier::notifyFrame()
tn_jpeg->out_height = tn_height;
tn_jpeg->right_crop = 0;
tn_jpeg->start_offset = 0;
- tn_jpeg->format = CameraParameters::PIXEL_FORMAT_YUV420SP;;
+ tn_jpeg->format = android::CameraParameters::PIXEL_FORMAT_YUV420SP;;
}
- sp<Encoder_libjpeg> encoder = new Encoder_libjpeg(main_jpeg,
+ android::sp<Encoder_libjpeg> encoder = new Encoder_libjpeg(main_jpeg,
tn_jpeg,
AppCallbackNotifierEncoderCallback,
(CameraFrame::FrameType)frame->mFrameType,
@@ -1016,7 +1017,7 @@ void AppCallbackNotifier::notifyFrame()
#ifdef COPY_IMAGE_BUFFER
{
- Mutex::Autolock lock(mBurstLock);
+ android::AutoMutex lock(mBurstLock);
#if defined(OMAP_ENHANCEMENT)
if ( mBurst )
{
@@ -1037,7 +1038,7 @@ void AppCallbackNotifier::notifyFrame()
( NULL != mDataCb) &&
( mCameraHal->msgTypeEnabled(CAMERA_MSG_VIDEO_FRAME) ) )
{
- AutoMutex locker(mRecordingLock);
+ android::AutoMutex locker(mRecordingLock);
if(mRecording)
{
if(mUseMetaDataBufferMode)
@@ -1055,8 +1056,8 @@ void AppCallbackNotifier::notifyFrame()
if ( mUseVideoBuffers )
{
CameraBuffer *vBuf = mVideoMap.valueFor(frame->mBuffer->opaque);
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
- Rect bounds;
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+ android::Rect bounds;
bounds.left = 0;
bounds.top = 0;
bounds.right = mVideoWidth;
@@ -1084,14 +1085,14 @@ void AppCallbackNotifier::notifyFrame()
VT_resizeFrame_Video_opt2_lp(&input, &output, NULL, 0);
mapper.unlock((buffer_handle_t)vBuf->opaque);
- videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
+ videoMetadataBuffer->metadataBufferType = (int) android::kMetadataBufferTypeCameraSource;
/* FIXME remove cast */
videoMetadataBuffer->handle = (void *)vBuf->opaque;
videoMetadataBuffer->offset = 0;
}
else
{
- videoMetadataBuffer->metadataBufferType = (int) kMetadataBufferTypeCameraSource;
+ videoMetadataBuffer->metadataBufferType = (int) android::kMetadataBufferTypeCameraSource;
videoMetadataBuffer->handle = camera_buffer_get_omx_ptr(frame->mBuffer);
videoMetadataBuffer->offset = frame->mOffset;
}
@@ -1182,7 +1183,7 @@ void AppCallbackNotifier::frameCallbackRelay(CameraFrame* caFrame)
void AppCallbackNotifier::frameCallback(CameraFrame* caFrame)
{
///Post the event to the event queue of AppCallbackNotifier
- TIUTILS::Message msg;
+ Utils::Message msg;
CameraFrame *frame;
LOG_FUNCTION_NAME;
@@ -1211,10 +1212,10 @@ void AppCallbackNotifier::flushAndReturnFrames()
{
LOG_FUNCTION_NAME;
- TIUTILS::Message msg;
+ Utils::Message msg;
CameraFrame *frame;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
while (!mFrameQ.isEmpty()) {
mFrameQ.get(&msg);
frame = (CameraFrame*) msg.arg1;
@@ -1239,7 +1240,7 @@ void AppCallbackNotifier::eventCallback(CameraHalEvent* chEvt)
{
///Post the event to the event queue of AppCallbackNotifier
- TIUTILS::Message msg;
+ Utils::Message msg;
CameraHalEvent *event;
@@ -1254,7 +1255,7 @@ void AppCallbackNotifier::eventCallback(CameraHalEvent* chEvt)
msg.command = AppCallbackNotifier::NOTIFIER_CMD_PROCESS_EVENT;
msg.arg1 = event;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mEventQ.put(&msg);
}
}
@@ -1273,7 +1274,7 @@ void AppCallbackNotifier::flushEventQueue()
{
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mEventQ.clear();
}
}
@@ -1282,7 +1283,7 @@ void AppCallbackNotifier::flushEventQueue()
bool AppCallbackNotifier::processMessage()
{
///Retrieve the command from the command queue and process it
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -1333,7 +1334,7 @@ AppCallbackNotifier::~AppCallbackNotifier()
mEventProvider->disableEventNotification(CameraHalEvent::ALL_EVENTS);
}
- TIUTILS::Message msg = {0,0,0,0,0,0};
+ Utils::Message msg = {0,0,0,0,0,0};
msg.command = NotificationThread::NOTIFIER_EXIT;
///Post the message to display thread
@@ -1446,16 +1447,16 @@ size_t AppCallbackNotifier::calculateBufferSize(size_t width, size_t height, con
LOG_FUNCTION_NAME
- if(strcmp(pixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
res = width*height*2;
- } else if(strcmp(pixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
res = (width*height*3)/2;
- } else if(strcmp(pixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
res = width*height*2;
- } else if (strcmp(pixelFormat, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ } else if (strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
size_t yStride, uvStride, ySize, uvSize;
alignYV12(width, height, yStride, uvStride, ySize, uvSize, res);
- mPreviewPixelFormat = CameraParameters::PIXEL_FORMAT_YUV420P;
+ mPreviewPixelFormat = android::CameraParameters::PIXEL_FORMAT_YUV420P;
}
LOG_FUNCTION_NAME_EXIT;
@@ -1466,33 +1467,31 @@ size_t AppCallbackNotifier::calculateBufferSize(size_t width, size_t height, con
const char* AppCallbackNotifier::getContstantForPixelFormat(const char *pixelFormat) {
if (!pixelFormat) {
// returning NV12 as default
- return CameraParameters::PIXEL_FORMAT_YUV420SP;
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
}
- if(strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
- return CameraParameters::PIXEL_FORMAT_YUV422I;
- } else if(strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ) {
- return CameraParameters::PIXEL_FORMAT_YUV420SP;
- } else if(strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
- return CameraParameters::PIXEL_FORMAT_RGB565;
- } else if(strcmp(pixelFormat, CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
- return CameraParameters::PIXEL_FORMAT_YUV420P;
+ if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ return android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ) {
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ return android::CameraParameters::PIXEL_FORMAT_RGB565;
+ } else if(strcmp(pixelFormat, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0) {
+ return android::CameraParameters::PIXEL_FORMAT_YUV420P;
} else {
// returning NV12 as default
- return CameraParameters::PIXEL_FORMAT_YUV420SP;
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
}
}
-status_t AppCallbackNotifier::startPreviewCallbacks(CameraParameters &params, CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
+status_t AppCallbackNotifier::startPreviewCallbacks(android::CameraParameters &params, CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count)
{
- sp<MemoryHeapBase> heap;
- sp<MemoryBase> buffer;
unsigned int *bufArr;
int size = 0;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if ( NULL == mFrameProvider )
{
@@ -1549,7 +1548,7 @@ void AppCallbackNotifier::setBurst(bool burst)
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mBurstLock);
+ android::AutoMutex lock(mBurstLock);
mBurst = burst;
@@ -1582,9 +1581,6 @@ void AppCallbackNotifier::setVideoRes(int width, int height)
status_t AppCallbackNotifier::stopPreviewCallbacks()
{
- sp<MemoryHeapBase> heap;
- sp<MemoryBase> buffer;
-
LOG_FUNCTION_NAME;
if ( NULL == mFrameProvider )
@@ -1602,7 +1598,7 @@ status_t AppCallbackNotifier::stopPreviewCallbacks()
mFrameProvider->disableFrameNotification(CameraFrame::SNAPSHOT_FRAME);
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mPreviewMemory->release(mPreviewMemory);
mPreviewMemory = 0;
}
@@ -1629,7 +1625,7 @@ status_t AppCallbackNotifier::startRecording()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mRecordingLock);
+ android::AutoMutex lock(mRecordingLock);
if ( NULL == mFrameProvider )
{
@@ -1707,7 +1703,7 @@ status_t AppCallbackNotifier::stopRecording()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mRecordingLock);
+ android::AutoMutex lock(mRecordingLock);
if ( NULL == mFrameProvider )
{
@@ -1869,14 +1865,14 @@ status_t AppCallbackNotifier::stop()
return ALREADY_EXISTS;
}
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mNotifierState = AppCallbackNotifier::NOTIFIER_STOPPED;
CAMHAL_LOGDA(" --> AppCallbackNotifier NOTIFIER_STOPPED \n");
}
while(!gEncoderQueue.isEmpty()) {
- sp<Encoder_libjpeg> encoder = gEncoderQueue.valueAt(0);
+ android::sp<Encoder_libjpeg> encoder = gEncoderQueue.valueAt(0);
camera_memory_t* encoded_mem = NULL;
ExifElementsTable* exif = NULL;
@@ -1905,4 +1901,5 @@ status_t AppCallbackNotifier::stop()
-};
+} // namespace Camera
+} // namespace Ti
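
AppCallbackNotifier's notification thread blocks on Utils::MessageQueue::waitForMsg() and then pops Utils::Message entries; the patch only renames the TIUTILS namespace to Utils. The toy below reproduces just that post/drain pattern with standard C++ types — it is not the TI Utils API, and ToyMessageQueue is an invented name.

```cpp
#include <cstdio>
#include <mutex>
#include <queue>

struct Message {        // toy counterpart of Utils::Message
    int   command;
    void* arg1;
};

class ToyMessageQueue {
public:
    void put(const Message& msg) {
        std::lock_guard<std::mutex> lock(mLock);
        mQueue.push(msg);
    }
    bool get(Message* out) {
        std::lock_guard<std::mutex> lock(mLock);
        if (mQueue.empty()) {
            return false;
        }
        *out = mQueue.front();
        mQueue.pop();
        return true;
    }
private:
    std::mutex          mLock;
    std::queue<Message> mQueue;
};

int main() {
    ToyMessageQueue q;
    Message msg = { 1 /* e.g. a "process event" command */, nullptr };
    q.put(msg);

    Message received;
    while (q.get(&received)) {          // drain, as the notification loop does
        std::printf("handled command %d\n", received.command);
    }
    return 0;
}
```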
diff --git a/camera/BaseCameraAdapter.cpp b/camera/BaseCameraAdapter.cpp
index a05cac5..7c0b9fc 100644
--- a/camera/BaseCameraAdapter.cpp
+++ b/camera/BaseCameraAdapter.cpp
@@ -18,7 +18,8 @@
const int EVENT_MASK = 0xffff;
-namespace android {
+namespace Ti {
+namespace Camera {
const LUT cameraCommandsUserToHAL[] = {
{ "CAMERA_START_PREVIEW", CameraAdapter::CAMERA_START_PREVIEW },
@@ -100,7 +101,7 @@ BaseCameraAdapter::~BaseCameraAdapter()
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
mFrameSubscribers.clear();
mImageSubscribers.clear();
@@ -168,7 +169,7 @@ status_t BaseCameraAdapter::setErrorHandler(ErrorNotifier *errorNotifier)
void BaseCameraAdapter::enableMsgType(int32_t msgs, frame_callback callback, event_callback eventCb, void* cookie)
{
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
LOG_FUNCTION_NAME;
@@ -228,7 +229,7 @@ void BaseCameraAdapter::enableMsgType(int32_t msgs, frame_callback callback, eve
void BaseCameraAdapter::disableMsgType(int32_t msgs, void* cookie)
{
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
LOG_FUNCTION_NAME;
@@ -299,7 +300,7 @@ void BaseCameraAdapter::disableMsgType(int32_t msgs, void* cookie)
void BaseCameraAdapter::addFramePointers(CameraBuffer *frameBuf, void *buf)
{
unsigned int *pBuf = (unsigned int *)buf;
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
if ((frameBuf != NULL) && ( pBuf != NULL) )
{
@@ -315,7 +316,7 @@ void BaseCameraAdapter::addFramePointers(CameraBuffer *frameBuf, void *buf)
void BaseCameraAdapter::removeFramePointers()
{
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
int size = mFrameQueue.size();
CAMHAL_LOGVB("Removing %d Frames = ", size);
@@ -342,7 +343,7 @@ void BaseCameraAdapter::returnFrame(CameraBuffer * frameBuf, CameraFrame::FrameT
if ( NO_ERROR == res)
{
- Mutex::Autolock lock(mReturnFrameLock);
+ android::AutoMutex lock(mReturnFrameLock);
refCount = getFrameRefCount(frameBuf, frameType);
@@ -428,7 +429,7 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
if ( ret == NO_ERROR )
{
- Mutex::Autolock lock(mPreviewBufferLock);
+ android::AutoMutex lock(mPreviewBufferLock);
mPreviewBuffers = desc->mBuffers;
mPreviewBuffersLength = desc->mLength;
mPreviewBuffersAvailable.clear();
@@ -482,7 +483,7 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
if ( ret == NO_ERROR )
{
- Mutex::Autolock lock(mPreviewDataBufferLock);
+ android::AutoMutex lock(mPreviewDataBufferLock);
mPreviewDataBuffers = desc->mBuffers;
mPreviewDataBuffersLength = desc->mLength;
mPreviewDataBuffersAvailable.clear();
@@ -535,7 +536,7 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
if ( ret == NO_ERROR )
{
- Mutex::Autolock lock(mCaptureBufferLock);
+ android::AutoMutex lock(mCaptureBufferLock);
mCaptureBuffers = desc->mBuffers;
mCaptureBuffersLength = desc->mLength;
mCaptureBuffersAvailable.clear();
@@ -586,7 +587,7 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
}
if (ret == NO_ERROR) {
- Mutex::Autolock lock(mVideoInBufferLock);
+ android::AutoMutex lock(mVideoInBufferLock);
mVideoInBuffers = desc->mBuffers;
mVideoInBuffersAvailable.clear();
for (uint32_t i = 0 ; i < desc->mMaxQueueable ; i++) {
@@ -1080,7 +1081,7 @@ status_t BaseCameraAdapter::sendCommand(CameraCommands operation, int value1, in
}
if ( ret == NO_ERROR ) {
- Mutex::Autolock lock(mVideoBufferLock);
+ android::AutoMutex lock(mVideoBufferLock);
mVideoBuffers = desc->mBuffers;
mVideoBuffersLength = desc->mLength;
mVideoBuffersAvailable.clear();
@@ -1255,7 +1256,7 @@ status_t BaseCameraAdapter::notifyZoomSubscribers(int zoomIdx, bool targetReache
return ret;
}
-status_t BaseCameraAdapter::notifyMetadataSubscribers(sp<CameraMetadataResult> &meta)
+status_t BaseCameraAdapter::notifyMetadataSubscribers(android::sp<CameraMetadataResult> &meta)
{
event_callback eventCb;
CameraHalEvent metaEvent;
@@ -1360,7 +1361,7 @@ status_t BaseCameraAdapter::sendFrameToSubscribers(CameraFrame *frame)
}
status_t BaseCameraAdapter::__sendFrameToSubscribers(CameraFrame* frame,
- KeyedVector<int, frame_callback> *subscribers,
+ android::KeyedVector<int, frame_callback> *subscribers,
CameraFrame::FrameType frameType)
{
size_t refCount = 0;
@@ -1493,36 +1494,36 @@ int BaseCameraAdapter::getFrameRefCount(CameraBuffer * frameBuf, CameraFrame::Fr
case CameraFrame::IMAGE_FRAME:
case CameraFrame::RAW_FRAME:
{
- Mutex::Autolock lock(mCaptureBufferLock);
+ android::AutoMutex lock(mCaptureBufferLock);
res = mCaptureBuffersAvailable.valueFor(frameBuf );
}
break;
case CameraFrame::SNAPSHOT_FRAME:
{
- Mutex::Autolock lock(mSnapshotBufferLock);
+ android::AutoMutex lock(mSnapshotBufferLock);
res = mSnapshotBuffersAvailable.valueFor( ( unsigned int ) frameBuf );
}
break;
case CameraFrame::PREVIEW_FRAME_SYNC:
{
- Mutex::Autolock lock(mPreviewBufferLock);
+ android::AutoMutex lock(mPreviewBufferLock);
res = mPreviewBuffersAvailable.valueFor(frameBuf );
}
break;
case CameraFrame::FRAME_DATA_SYNC:
{
- Mutex::Autolock lock(mPreviewDataBufferLock);
+ android::AutoMutex lock(mPreviewDataBufferLock);
res = mPreviewDataBuffersAvailable.valueFor(frameBuf );
}
break;
case CameraFrame::VIDEO_FRAME_SYNC:
{
- Mutex::Autolock lock(mVideoBufferLock);
+ android::AutoMutex lock(mVideoBufferLock);
res = mVideoBuffersAvailable.valueFor(frameBuf );
}
break;
case CameraFrame::REPROCESS_INPUT_FRAME: {
- Mutex::Autolock lock(mVideoInBufferLock);
+ android::AutoMutex lock(mVideoInBufferLock);
res = mVideoInBuffersAvailable.valueFor(frameBuf );
}
break;
@@ -1545,36 +1546,36 @@ void BaseCameraAdapter::setFrameRefCount(CameraBuffer * frameBuf, CameraFrame::F
case CameraFrame::IMAGE_FRAME:
case CameraFrame::RAW_FRAME:
{
- Mutex::Autolock lock(mCaptureBufferLock);
+ android::AutoMutex lock(mCaptureBufferLock);
mCaptureBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
case CameraFrame::SNAPSHOT_FRAME:
{
- Mutex::Autolock lock(mSnapshotBufferLock);
+ android::AutoMutex lock(mSnapshotBufferLock);
mSnapshotBuffersAvailable.replaceValueFor( ( unsigned int ) frameBuf, refCount);
}
break;
case CameraFrame::PREVIEW_FRAME_SYNC:
{
- Mutex::Autolock lock(mPreviewBufferLock);
+ android::AutoMutex lock(mPreviewBufferLock);
mPreviewBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
case CameraFrame::FRAME_DATA_SYNC:
{
- Mutex::Autolock lock(mPreviewDataBufferLock);
+ android::AutoMutex lock(mPreviewDataBufferLock);
mPreviewDataBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
case CameraFrame::VIDEO_FRAME_SYNC:
{
- Mutex::Autolock lock(mVideoBufferLock);
+ android::AutoMutex lock(mVideoBufferLock);
mVideoBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
case CameraFrame::REPROCESS_INPUT_FRAME: {
- Mutex::Autolock lock(mVideoInBufferLock);
+ android::AutoMutex lock(mVideoInBufferLock);
mVideoInBuffersAvailable.replaceValueFor(frameBuf, refCount);
}
break;
@@ -1592,7 +1593,7 @@ status_t BaseCameraAdapter::startVideoCapture()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mVideoBufferLock);
+ android::AutoMutex lock(mVideoBufferLock);
//If the capture is already ongoing, return from here.
if ( mRecording )
@@ -2564,7 +2565,7 @@ CameraAdapter::AdapterState BaseCameraAdapter::getState()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME_EXIT;
@@ -2577,7 +2578,7 @@ CameraAdapter::AdapterState BaseCameraAdapter::getNextState()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME_EXIT;
@@ -2669,7 +2670,8 @@ extern "C" status_t CameraAdapter_Capabilities(
//-----------------------------------------------------------------------------
-};
+} // namespace Camera
+} // namespace Ti
/*--------------------Camera Adapter Class ENDS here-----------------------------*/
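
BaseCameraAdapter's getFrameRefCount()/setFrameRefCount() keep one availability table per frame type, each guarded by its own mutex (now taken via android::AutoMutex). The sketch below illustrates that per-type bookkeeping with std::map and std::mutex instead of the adapter's android::KeyedVector members; it is a simplified assumption-based example, not the adapter code.

```cpp
#include <cstdio>
#include <map>
#include <mutex>

enum FrameType { PREVIEW_FRAME_SYNC, VIDEO_FRAME_SYNC };

class FrameRefCounter {
public:
    void setRefCount(FrameType type, void* buf, int count) {
        std::lock_guard<std::mutex> lock(lockFor(type));
        tableFor(type)[buf] = count;
    }
    int getRefCount(FrameType type, void* buf) {
        std::lock_guard<std::mutex> lock(lockFor(type));
        std::map<void*, int>& table = tableFor(type);
        std::map<void*, int>::const_iterator it = table.find(buf);
        return (it == table.end()) ? 0 : it->second;
    }
private:
    // One lock and one table per buffer class, as in the switch statements above.
    std::mutex& lockFor(FrameType type) {
        return (type == PREVIEW_FRAME_SYNC) ? mPreviewLock : mVideoLock;
    }
    std::map<void*, int>& tableFor(FrameType type) {
        return (type == PREVIEW_FRAME_SYNC) ? mPreviewAvailable : mVideoAvailable;
    }

    std::mutex mPreviewLock;
    std::mutex mVideoLock;
    std::map<void*, int> mPreviewAvailable;
    std::map<void*, int> mVideoAvailable;
};

int main() {
    int dummyBuffer = 0;
    FrameRefCounter counter;
    counter.setRefCount(PREVIEW_FRAME_SYNC, &dummyBuffer, 2);
    std::printf("preview refcount: %d\n",
                counter.getRefCount(PREVIEW_FRAME_SYNC, &dummyBuffer));
    return 0;
}
```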
diff --git a/camera/BufferSourceAdapter.cpp b/camera/BufferSourceAdapter.cpp
index 0ac0029..2f5bf4c 100644
--- a/camera/BufferSourceAdapter.cpp
+++ b/camera/BufferSourceAdapter.cpp
@@ -21,25 +21,26 @@
#include <ui/GraphicBufferMapper.h>
#include <hal_public.h>
-namespace android {
+namespace Ti {
+namespace Camera {
static int getANWFormat(const char* parameters_format)
{
int format = HAL_PIXEL_FORMAT_TI_NV12;
if (parameters_format != NULL) {
- if (strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
CAMHAL_LOGDA("CbYCrY format selected");
// TODO(XXX): not defined yet
format = -1;
- } else if (strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ } else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
CAMHAL_LOGDA("YUV420SP format selected");
format = HAL_PIXEL_FORMAT_TI_NV12;
- } else if (strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ } else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
CAMHAL_LOGDA("RGB565 format selected");
// TODO(XXX): not defined yet
format = -1;
- } else if (strcmp(parameters_format, CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ } else if (strcmp(parameters_format, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
format = HAL_PIXEL_FORMAT_TI_Y16;
} else {
CAMHAL_LOGDA("Invalid format, NV12 format selected as default");
@@ -74,13 +75,13 @@ static const char* getFormatFromANW(int format)
switch (format) {
case HAL_PIXEL_FORMAT_TI_NV12:
// Assuming NV12 1D is RAW or Image frame
- return CameraParameters::PIXEL_FORMAT_YUV420SP;
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
case HAL_PIXEL_FORMAT_TI_Y16:
- return CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
+ return android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
default:
break;
}
- return CameraParameters::PIXEL_FORMAT_YUV420SP;
+ return android::CameraParameters::PIXEL_FORMAT_YUV420SP;
}
static CameraFrame::FrameType formatToOutputFrameType(const char* format) {
@@ -299,7 +300,7 @@ CameraBuffer* BufferSourceAdapter::allocateBufferList(int width, int height, con
int i = -1;
const int lnumBufs = numBufs;
int undequeued = 0;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
mBuffers = new CameraBuffer [lnumBufs];
memset (mBuffers, 0, sizeof(CameraBuffer) * lnumBufs);
@@ -389,7 +390,7 @@ CameraBuffer* BufferSourceAdapter::allocateBufferList(int width, int height, con
for( i = 0; i < mBufferCount-undequeued; i++ ) {
void *y_uv[2];
- Rect bounds(width, height);
+ android::Rect bounds(width, height);
buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
mBufferSource->lock_buffer(mBufferSource, handle);
@@ -401,7 +402,7 @@ CameraBuffer* BufferSourceAdapter::allocateBufferList(int width, int height, con
for(i = (mBufferCount-undequeued); i >= 0 && i < mBufferCount; i++) {
buffer_handle_t *handle = (buffer_handle_t *) mBuffers[i].opaque;
void *y_uv[2];
- Rect bounds(width, height);
+ android::Rect bounds(width, height);
mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
mBuffers[i].mapped = y_uv[0];
@@ -456,7 +457,7 @@ CameraBuffer* BufferSourceAdapter::getBufferList(int *num) {
status_t err;
const int lnumBufs = 1;
int formatSource;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
buffer_handle_t *handle;
// TODO(XXX): Only supporting one input buffer at a time right now
@@ -489,7 +490,7 @@ CameraBuffer* BufferSourceAdapter::getBufferList(int *num) {
// lock buffer
{
void *y_uv[2];
- Rect bounds(mBuffers[0].width, mBuffers[0].height);
+ android::Rect bounds(mBuffers[0].width, mBuffers[0].height);
mapper.lock(*handle, CAMHAL_GRALLOC_USAGE, bounds, y_uv);
mBuffers[0].mapped = y_uv[0];
}
@@ -586,7 +587,7 @@ int BufferSourceAdapter::getFd()
status_t BufferSourceAdapter::returnBuffersToWindow()
{
status_t ret = NO_ERROR;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
//Give the buffers back to display here - sort of free it
if (mBufferSource) {
@@ -632,7 +633,7 @@ int BufferSourceAdapter::freeBufferList(CameraBuffer * buflist)
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if (mBufferSourceDirection == BUFFER_SOURCE_TAP_OUT) returnBuffersToWindow();
@@ -669,14 +670,14 @@ void BufferSourceAdapter::handleFrameCallback(CameraFrame* frame)
status_t ret = NO_ERROR;
buffer_handle_t *handle = NULL;
int i;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
if (!mBuffers || !frame->mBuffer) {
CAMHAL_LOGEA("Adapter sent BufferSourceAdapter a NULL frame?");
return;
}
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
for ( i = 0; i < mBufferCount; i++ ) {
if (frame->mBuffer == &mBuffers[i]) {
@@ -695,7 +696,7 @@ void BufferSourceAdapter::handleFrameCallback(CameraFrame* frame)
return;
}
- frame->mMetaData.setTime(CameraMetadata::KEY_TIMESTAMP, frame->mTimestamp);
+ frame->mMetaData.setTime(android::CameraMetadata::KEY_TIMESTAMP, frame->mTimestamp);
ret = mBufferSource->set_metadata(mBufferSource, frame->mMetaData.flatten().string());
if (ret != 0) {
CAMHAL_LOGE("Surface::set_metadata returned error %d", ret);
@@ -723,9 +724,9 @@ bool BufferSourceAdapter::handleFrameReturn()
int i = 0;
int stride; // dummy variable to get stride
CameraFrame::FrameType type;
- GraphicBufferMapper &mapper = GraphicBufferMapper::get();
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
void *y_uv[2];
- Rect bounds(mFrameWidth, mFrameHeight);
+ android::Rect bounds(mFrameWidth, mFrameHeight);
if ( NULL == mBufferSource ) {
return false;
@@ -787,6 +788,7 @@ void BufferSourceAdapter::frameCallback(CameraFrame* caFrame)
/*--------------------BufferSourceAdapter Class ENDS here-----------------------------*/
-};
+} // namespace Camera
+} // namespace Ti
#endif
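
BufferSourceAdapter::getANWFormat() above maps the preview-format string onto a gralloc pixel format, defaulting to NV12 and returning -1 for formats it cannot tap yet. The following compiles on its own under two stated assumptions: the HAL_PIXEL_FORMAT_TI_* values are placeholders (the real ones come from hal_public.h), and the literal strings stand in for the android::CameraParameters constants.

```cpp
#include <cstdio>
#include <cstring>

namespace {
// Placeholder values, not the real <hal_public.h> constants.
const int kHalPixelFormatTiNv12 = 0x100;
const int kHalPixelFormatTiY16  = 0x101;

int anwFormatFor(const char* fmt) {
    if (fmt == NULL) {
        return kHalPixelFormatTiNv12;                 // NV12 is the default
    }
    if (std::strcmp(fmt, "yuv420sp") == 0) {
        return kHalPixelFormatTiNv12;
    }
    if (std::strcmp(fmt, "bayer-rggb") == 0) {
        return kHalPixelFormatTiY16;
    }
    if (std::strcmp(fmt, "yuv422i-yuyv") == 0 || std::strcmp(fmt, "rgb565") == 0) {
        return -1;                                    // not supported yet
    }
    return kHalPixelFormatTiNv12;                     // fall back to NV12
}
} // namespace

int main() {
    std::printf("bayer-rggb maps to 0x%x\n", anwFormatFor("bayer-rggb"));
    return 0;
}
```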
diff --git a/camera/CameraHal.cpp b/camera/CameraHal.cpp
index b62f756..d788f9e 100644
--- a/camera/CameraHal.cpp
+++ b/camera/CameraHal.cpp
@@ -31,7 +31,8 @@
#include <poll.h>
#include <math.h>
-namespace android {
+namespace Ti {
+namespace Camera {
extern "C" CameraAdapter* OMXCameraAdapter_Factory(size_t);
extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t);
@@ -151,7 +152,7 @@ void CameraHal::enableMsgType(int32_t msgType)
}
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mMsgEnabled |= msgType;
}
@@ -190,7 +191,7 @@ void CameraHal::disableMsgType(int32_t msgType)
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mMsgEnabled &= ~msgType;
}
@@ -221,7 +222,7 @@ int CameraHal::msgTypeEnabled(int32_t msgType)
int32_t msgEnabled = 0;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
msgEnabled = mMsgEnabled;
if (!previewEnabled()) {
@@ -245,9 +246,9 @@ int CameraHal::setParameters(const char* parameters)
LOG_FUNCTION_NAME;
- CameraParameters params;
+ android::CameraParameters params;
- String8 str_params(parameters);
+ android::String8 str_params(parameters);
params.unflatten(str_params);
LOG_FUNCTION_NAME_EXIT;
@@ -263,7 +264,7 @@ int CameraHal::setParameters(const char* parameters)
@todo Define error codes
*/
-int CameraHal::setParameters(const CameraParameters& params)
+int CameraHal::setParameters(const android::CameraParameters& params)
{
LOG_FUNCTION_NAME;
@@ -276,7 +277,7 @@ int CameraHal::setParameters(const CameraParameters& params)
// Needed for KEY_RECORDING_HINT
bool restartPreviewRequired = false;
bool updateRequired = false;
- CameraParameters oldParams(mParameters.flatten());
+ android::CameraParameters oldParams(mParameters.flatten());
#ifdef V4L_CAMERA_ADAPTER
if (strcmp (V4L_CAMERA_NAME_USB, mCameraProperties->get(CameraProperties::CAMERA_NAME)) == 0 ) {
@@ -285,7 +286,7 @@ int CameraHal::setParameters(const CameraParameters& params)
#endif
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
///Ensure that preview is not enabled when the below parameters are changed.
if(!previewEnabled())
@@ -303,35 +304,35 @@ int CameraHal::setParameters(const CameraParameters& params)
if ((valstr = params.get(TICameraParameters::KEY_VNF)) != NULL) {
if (strcmp(mCameraProperties->get(CameraProperties::VNF_SUPPORTED),
- CameraParameters::TRUE) == 0) {
+ android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGDB("VNF %s",
params.get(TICameraParameters::KEY_VNF));
mParameters.set(TICameraParameters::KEY_VNF,
params.get(TICameraParameters::KEY_VNF));
- } else if (strcmp(valstr, CameraParameters::TRUE) == 0) {
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGEB("ERROR: Invalid VNF: %s", valstr);
return BAD_VALUE;
} else {
mParameters.set(TICameraParameters::KEY_VNF,
- CameraParameters::FALSE);
+ android::CameraParameters::FALSE);
}
}
- if ((valstr = params.get(CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) != NULL) {
// make sure we support vstab...if we don't and application is trying to set
// vstab then return an error
if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
- CameraParameters::TRUE) == 0) {
+ android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGDB("VSTAB %s",
- params.get(CameraParameters::KEY_VIDEO_STABILIZATION));
- mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION,
- params.get(CameraParameters::KEY_VIDEO_STABILIZATION));
- } else if (strcmp(valstr, CameraParameters::TRUE) == 0) {
+ params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION));
+ mParameters.set(android::CameraParameters::KEY_VIDEO_STABILIZATION,
+ params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION));
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGEB("ERROR: Invalid VSTAB: %s", valstr);
return BAD_VALUE;
} else {
- mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION,
- CameraParameters::FALSE);
+ mParameters.set(android::CameraParameters::KEY_VIDEO_STABILIZATION,
+ android::CameraParameters::FALSE);
}
}
@@ -373,7 +374,7 @@ int CameraHal::setParameters(const CameraParameters& params)
#ifdef OMAP_ENHANCEMENT_VTC
if ((valstr = params.get(TICameraParameters::KEY_VTC_HINT)) != NULL ) {
mParameters.set(TICameraParameters::KEY_VTC_HINT, valstr);
- if (strcmp(valstr, CameraParameters::TRUE) == 0) {
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
mVTCUseCase = true;
} else {
mVTCUseCase = false;
@@ -430,11 +431,11 @@ int CameraHal::setParameters(const CameraParameters& params)
mVideoHeight = h;
// Handle RECORDING_HINT to Set/Reset Video Mode Parameters
- valstr = params.get(CameraParameters::KEY_RECORDING_HINT);
+ valstr = params.get(android::CameraParameters::KEY_RECORDING_HINT);
if(valstr != NULL)
{
CAMHAL_LOGDB("Recording Hint is set to %s", valstr);
- if(strcmp(valstr, CameraParameters::TRUE) == 0)
+ if(strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
CAMHAL_LOGVB("Video Resolution: %d x %d", mVideoWidth, mVideoHeight);
#ifdef OMAP_ENHANCEMENT_VTC
@@ -449,12 +450,12 @@ int CameraHal::setParameters(const CameraParameters& params)
getPreferredPreviewRes(&w, &h);
}
}
- mParameters.set(CameraParameters::KEY_RECORDING_HINT, valstr);
+ mParameters.set(android::CameraParameters::KEY_RECORDING_HINT, valstr);
restartPreviewRequired |= setVideoModeParameters(params);
}
- else if(strcmp(valstr, CameraParameters::FALSE) == 0)
+ else if(strcmp(valstr, android::CameraParameters::FALSE) == 0)
{
- mParameters.set(CameraParameters::KEY_RECORDING_HINT, valstr);
+ mParameters.set(android::CameraParameters::KEY_RECORDING_HINT, valstr);
restartPreviewRequired |= resetVideoModeParameters();
}
else
@@ -470,7 +471,7 @@ int CameraHal::setParameters(const CameraParameters& params)
// ImageCapture activity doesnot set KEY_RECORDING_HINT to FALSE (i.e. simply NULL),
// then Video Mode parameters may remain present in ImageCapture activity as well.
CAMHAL_LOGDA("Recording Hint is set to NULL");
- mParameters.set(CameraParameters::KEY_RECORDING_HINT, "");
+ mParameters.set(android::CameraParameters::KEY_RECORDING_HINT, "");
restartPreviewRequired |= resetVideoModeParameters();
}
@@ -492,19 +493,19 @@ int CameraHal::setParameters(const CameraParameters& params)
CAMHAL_LOGDB("Preview Resolution: %d x %d", w, h);
- if ((valstr = params.get(CameraParameters::KEY_FOCUS_MODE)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_FOCUS_MODE)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES))) {
CAMHAL_LOGDB("Focus mode set %s", valstr);
// we need to take a decision on the capture mode based on whether CAF picture or
// video is chosen so the behavior of each is consistent to the application
- if(strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0){
+ if(strcmp(valstr, android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0){
restartPreviewRequired |= resetVideoModeParameters();
- } else if (strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) == 0){
+ } else if (strcmp(valstr, android::CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) == 0){
restartPreviewRequired |= setVideoModeParameters(params);
}
- mParameters.set(CameraParameters::KEY_FOCUS_MODE, valstr);
+ mParameters.set(android::CameraParameters::KEY_FOCUS_MODE, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid FOCUS mode = %s", valstr);
return BAD_VALUE;
@@ -552,7 +553,7 @@ int CameraHal::setParameters(const CameraParameters& params)
if ( (valstr = params.getPictureFormat()) != NULL ) {
if (isParameterValid(params.getPictureFormat(),mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS))) {
- if ((strcmp(valstr, CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) &&
+ if ((strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) &&
mCameraProperties->get(CameraProperties::MAX_PICTURE_WIDTH) &&
mCameraProperties->get(CameraProperties::MAX_PICTURE_HEIGHT)) {
unsigned int width = 0, height = 0;
@@ -583,7 +584,7 @@ int CameraHal::setParameters(const CameraParameters& params)
// be cleared by the client in order for constant FPS to get
// applied.
// If Port FPS needs to be used for configuring, then FPS RANGE should not be set by the APP.
- valstr = params.get(CameraParameters::KEY_PREVIEW_FPS_RANGE);
+ valstr = params.get(android::CameraParameters::KEY_PREVIEW_FPS_RANGE);
if (valstr != NULL && strlen(valstr)) {
int curMaxFPS = 0;
int curMinFPS = 0;
@@ -598,7 +599,7 @@ int CameraHal::setParameters(const CameraParameters& params)
params.getPreviewFpsRange(&minFPS, &maxFPS);
CAMHAL_LOGDB("## requested minFPS = %d; maxFPS=%d",minFPS, maxFPS);
// Validate VFR
- if (!isFpsRangeValid(minFPS, maxFPS, params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE)) &&
+ if (!isFpsRangeValid(minFPS, maxFPS, params.get(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE)) &&
!isFpsRangeValid(minFPS, maxFPS, params.get(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED))) {
CAMHAL_LOGEA("Invalid FPS Range");
return BAD_VALUE;
@@ -606,7 +607,7 @@ int CameraHal::setParameters(const CameraParameters& params)
framerate = maxFPS / CameraHal::VFR_SCALE;
mParameters.setPreviewFrameRate(framerate);
CAMHAL_LOGDB("SET FRAMERATE %d", framerate);
- mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
+ mParameters.set(android::CameraParameters::KEY_PREVIEW_FPS_RANGE, valstr);
CAMHAL_LOGDB("FPS Range = %s", valstr);
if ( curMaxFPS == (FRAME_RATE_HIGH_HD * CameraHal::VFR_SCALE) &&
maxFPS < (FRAME_RATE_HIGH_HD * CameraHal::VFR_SCALE) ) {
@@ -615,7 +616,7 @@ int CameraHal::setParameters(const CameraParameters& params)
}
} else {
framerate = params.getPreviewFrameRate();
- if (!isParameterValid(framerate, params.get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES)) &&
+ if (!isParameterValid(framerate, params.get(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES)) &&
!isParameterValid(framerate, params.get(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED))) {
CAMHAL_LOGEA("Invalid frame rate");
return BAD_VALUE;
@@ -625,42 +626,38 @@ int CameraHal::setParameters(const CameraParameters& params)
sprintf(tmpBuffer, "%d,%d", framerate * CameraHal::VFR_SCALE, framerate * CameraHal::VFR_SCALE);
mParameters.setPreviewFrameRate(framerate);
CAMHAL_LOGDB("SET FRAMERATE %d", framerate);
- mParameters.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, tmpBuffer);
+ mParameters.set(android::CameraParameters::KEY_PREVIEW_FPS_RANGE, tmpBuffer);
CAMHAL_LOGDB("FPS Range = %s", tmpBuffer);
}
if ((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) {
if (strcmp(mCameraProperties->get(CameraProperties::SUPPORTED_GBCE),
- CameraParameters::TRUE) == 0) {
+ android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGDB("GBCE %s", valstr);
mParameters.set(TICameraParameters::KEY_GBCE, valstr);
- } else if (strcmp(valstr, CameraParameters::TRUE) == 0) {
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGEB("ERROR: Invalid GBCE: %s", valstr);
return BAD_VALUE;
} else {
- mParameters.set(TICameraParameters::KEY_GBCE,
- CameraParameters::FALSE);
+ mParameters.set(TICameraParameters::KEY_GBCE, android::CameraParameters::FALSE);
}
} else {
- mParameters.set(TICameraParameters::KEY_GBCE,
- CameraParameters::FALSE);
+ mParameters.set(TICameraParameters::KEY_GBCE, android::CameraParameters::FALSE);
}
if ((valstr = params.get(TICameraParameters::KEY_GLBCE)) != NULL) {
if (strcmp(mCameraProperties->get(CameraProperties::SUPPORTED_GLBCE),
- CameraParameters::TRUE) == 0) {
+ android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGDB("GLBCE %s", valstr);
mParameters.set(TICameraParameters::KEY_GLBCE, valstr);
- } else if (strcmp(valstr, CameraParameters::TRUE) == 0) {
+ } else if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGEB("ERROR: Invalid GLBCE: %s", valstr);
return BAD_VALUE;
} else {
- mParameters.set(TICameraParameters::KEY_GLBCE,
- CameraParameters::FALSE);
+ mParameters.set(TICameraParameters::KEY_GLBCE, android::CameraParameters::FALSE);
}
} else {
- mParameters.set(TICameraParameters::KEY_GLBCE,
- CameraParameters::FALSE);
+ mParameters.set(TICameraParameters::KEY_GLBCE, android::CameraParameters::FALSE);
}
///Update the current parameter set
@@ -684,7 +681,7 @@ int CameraHal::setParameters(const CameraParameters& params)
if((valstr = params.get(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION)) != NULL) {
if ( strcmp(mCameraProperties->get(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED),
- CameraParameters::TRUE) == 0 ) {
+ android::CameraParameters::TRUE) == 0 ) {
CAMHAL_LOGDB("Mechanical Mialignment Correction is %s", valstr);
mParameters.set(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION, valstr);
} else {
@@ -749,11 +746,10 @@ int CameraHal::setParameters(const CameraParameters& params)
}
}
- CAMHAL_LOGD("wb: %s", params.get(CameraParameters::KEY_WHITE_BALANCE));
- if ((valstr = params.get(CameraParameters::KEY_WHITE_BALANCE)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_WHITE_BALANCE)) != NULL) {
if ( isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE))) {
CAMHAL_LOGDB("White balance set %s", valstr);
- mParameters.set(CameraParameters::KEY_WHITE_BALANCE, valstr);
+ mParameters.set(android::CameraParameters::KEY_WHITE_BALANCE, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid white balance = %s", valstr);
return BAD_VALUE;
@@ -800,10 +796,10 @@ int CameraHal::setParameters(const CameraParameters& params)
}
}
- if ((valstr = params.get(CameraParameters::KEY_ANTIBANDING)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_ANTIBANDING)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING))) {
CAMHAL_LOGDB("Antibanding set %s", valstr);
- mParameters.set(CameraParameters::KEY_ANTIBANDING, valstr);
+ mParameters.set(android::CameraParameters::KEY_ANTIBANDING, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Antibanding = %s", valstr);
return BAD_VALUE;
@@ -820,10 +816,10 @@ int CameraHal::setParameters(const CameraParameters& params)
}
}
- if( (valstr = params.get(CameraParameters::KEY_FOCUS_AREAS)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_FOCUS_AREAS)) != NULL )
{
- CAMHAL_LOGDB("Focus areas position set %s", params.get(CameraParameters::KEY_FOCUS_AREAS));
- mParameters.set(CameraParameters::KEY_FOCUS_AREAS, valstr);
+ CAMHAL_LOGDB("Focus areas position set %s", params.get(android::CameraParameters::KEY_FOCUS_AREAS));
+ mParameters.set(android::CameraParameters::KEY_FOCUS_AREAS, valstr);
}
if( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
@@ -831,11 +827,11 @@ int CameraHal::setParameters(const CameraParameters& params)
CAMHAL_LOGDB("Measurements set to %s", valstr);
mParameters.set(TICameraParameters::KEY_MEASUREMENT_ENABLE, valstr);
- if (strcmp(valstr, CameraParameters::TRUE) == 0)
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
mMeasurementEnabled = true;
}
- else if (strcmp(valstr, CameraParameters::FALSE) == 0)
+ else if (strcmp(valstr, android::CameraParameters::FALSE) == 0)
{
mMeasurementEnabled = false;
}
@@ -846,110 +842,110 @@ int CameraHal::setParameters(const CameraParameters& params)
}
- if( (valstr = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION)) != NULL)
+ if( (valstr = params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION)) != NULL)
{
- CAMHAL_LOGDB("Exposure compensation set %s", params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION));
- mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, valstr);
+ CAMHAL_LOGDB("Exposure compensation set %s", params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION));
+ mParameters.set(android::CameraParameters::KEY_EXPOSURE_COMPENSATION, valstr);
}
- if ((valstr = params.get(CameraParameters::KEY_SCENE_MODE)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_SCENE_MODE)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES))) {
CAMHAL_LOGDB("Scene mode set %s", valstr);
doesSetParameterNeedUpdate(valstr,
- mParameters.get(CameraParameters::KEY_SCENE_MODE),
+ mParameters.get(android::CameraParameters::KEY_SCENE_MODE),
updateRequired);
- mParameters.set(CameraParameters::KEY_SCENE_MODE, valstr);
+ mParameters.set(android::CameraParameters::KEY_SCENE_MODE, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Scene mode = %s", valstr);
return BAD_VALUE;
}
}
- if ((valstr = params.get(CameraParameters::KEY_FLASH_MODE)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_FLASH_MODE)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES))) {
CAMHAL_LOGDB("Flash mode set %s", valstr);
- mParameters.set(CameraParameters::KEY_FLASH_MODE, valstr);
+ mParameters.set(android::CameraParameters::KEY_FLASH_MODE, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Flash mode = %s", valstr);
return BAD_VALUE;
}
}
- if ((valstr = params.get(CameraParameters::KEY_EFFECT)) != NULL) {
+ if ((valstr = params.get(android::CameraParameters::KEY_EFFECT)) != NULL) {
if (isParameterValid(valstr, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS))) {
CAMHAL_LOGDB("Effect set %s", valstr);
- mParameters.set(CameraParameters::KEY_EFFECT, valstr);
+ mParameters.set(android::CameraParameters::KEY_EFFECT, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Effect = %s", valstr);
return BAD_VALUE;
}
}
- if(( (valstr = params.get(CameraParameters::KEY_ROTATION)) != NULL)
- && (params.getInt(CameraParameters::KEY_ROTATION) >=0))
+ if(( (valstr = params.get(android::CameraParameters::KEY_ROTATION)) != NULL)
+ && (params.getInt(android::CameraParameters::KEY_ROTATION) >=0))
{
- CAMHAL_LOGDB("Rotation set %s", params.get(CameraParameters::KEY_ROTATION));
- mParameters.set(CameraParameters::KEY_ROTATION, valstr);
+ CAMHAL_LOGDB("Rotation set %s", params.get(android::CameraParameters::KEY_ROTATION));
+ mParameters.set(android::CameraParameters::KEY_ROTATION, valstr);
}
- if(( (valstr = params.get(CameraParameters::KEY_JPEG_QUALITY)) != NULL)
- && (params.getInt(CameraParameters::KEY_JPEG_QUALITY) >=0))
+ if(( (valstr = params.get(android::CameraParameters::KEY_JPEG_QUALITY)) != NULL)
+ && (params.getInt(android::CameraParameters::KEY_JPEG_QUALITY) >=0))
{
- CAMHAL_LOGDB("Jpeg quality set %s", params.get(CameraParameters::KEY_JPEG_QUALITY));
- mParameters.set(CameraParameters::KEY_JPEG_QUALITY, valstr);
+ CAMHAL_LOGDB("Jpeg quality set %s", params.get(android::CameraParameters::KEY_JPEG_QUALITY));
+ mParameters.set(android::CameraParameters::KEY_JPEG_QUALITY, valstr);
}
- if(( (valstr = params.get(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH)) != NULL)
- && (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH) >=0))
+ if(( (valstr = params.get(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH)) != NULL)
+ && (params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH) >=0))
{
- CAMHAL_LOGDB("Thumbnail width set %s", params.get(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH));
- mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, valstr);
+ CAMHAL_LOGDB("Thumbnail width set %s", params.get(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH));
+ mParameters.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, valstr);
}
- if(( (valstr = params.get(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT)) != NULL)
- && (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT) >=0))
+ if(( (valstr = params.get(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT)) != NULL)
+ && (params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT) >=0))
{
- CAMHAL_LOGDB("Thumbnail width set %s", params.get(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT));
- mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, valstr);
+ CAMHAL_LOGDB("Thumbnail width set %s", params.get(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT));
+ mParameters.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, valstr);
}
- if(( (valstr = params.get(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY)) != NULL )
- && (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) >=0))
+ if(( (valstr = params.get(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY)) != NULL )
+ && (params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) >=0))
{
- CAMHAL_LOGDB("Thumbnail quality set %s", params.get(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY));
- mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, valstr);
+ CAMHAL_LOGDB("Thumbnail quality set %s", params.get(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY));
+ mParameters.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, valstr);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_LATITUDE)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_LATITUDE)) != NULL )
{
- CAMHAL_LOGDB("GPS latitude set %s", params.get(CameraParameters::KEY_GPS_LATITUDE));
- mParameters.set(CameraParameters::KEY_GPS_LATITUDE, valstr);
+ CAMHAL_LOGDB("GPS latitude set %s", params.get(android::CameraParameters::KEY_GPS_LATITUDE));
+ mParameters.set(android::CameraParameters::KEY_GPS_LATITUDE, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_LATITUDE);
+ mParameters.remove(android::CameraParameters::KEY_GPS_LATITUDE);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_LONGITUDE)) != NULL )
{
- CAMHAL_LOGDB("GPS longitude set %s", params.get(CameraParameters::KEY_GPS_LONGITUDE));
- mParameters.set(CameraParameters::KEY_GPS_LONGITUDE, valstr);
+ CAMHAL_LOGDB("GPS longitude set %s", params.get(android::CameraParameters::KEY_GPS_LONGITUDE));
+ mParameters.set(android::CameraParameters::KEY_GPS_LONGITUDE, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_LONGITUDE);
+ mParameters.remove(android::CameraParameters::KEY_GPS_LONGITUDE);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_ALTITUDE)) != NULL )
{
- CAMHAL_LOGDB("GPS altitude set %s", params.get(CameraParameters::KEY_GPS_ALTITUDE));
- mParameters.set(CameraParameters::KEY_GPS_ALTITUDE, valstr);
+ CAMHAL_LOGDB("GPS altitude set %s", params.get(android::CameraParameters::KEY_GPS_ALTITUDE));
+ mParameters.set(android::CameraParameters::KEY_GPS_ALTITUDE, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_ALTITUDE);
+ mParameters.remove(android::CameraParameters::KEY_GPS_ALTITUDE);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
{
- CAMHAL_LOGDB("GPS timestamp set %s", params.get(CameraParameters::KEY_GPS_TIMESTAMP));
- mParameters.set(CameraParameters::KEY_GPS_TIMESTAMP, valstr);
+ CAMHAL_LOGDB("GPS timestamp set %s", params.get(android::CameraParameters::KEY_GPS_TIMESTAMP));
+ mParameters.set(android::CameraParameters::KEY_GPS_TIMESTAMP, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_TIMESTAMP);
+ mParameters.remove(android::CameraParameters::KEY_GPS_TIMESTAMP);
}
if( (valstr = params.get(TICameraParameters::KEY_GPS_DATESTAMP)) != NULL )
@@ -960,12 +956,12 @@ int CameraHal::setParameters(const CameraParameters& params)
mParameters.remove(TICameraParameters::KEY_GPS_DATESTAMP);
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD)) != NULL )
{
- CAMHAL_LOGDB("GPS processing method set %s", params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD));
- mParameters.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, valstr);
+ CAMHAL_LOGDB("GPS processing method set %s", params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD));
+ mParameters.set(android::CameraParameters::KEY_GPS_PROCESSING_METHOD, valstr);
}else{
- mParameters.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD);
+ mParameters.remove(android::CameraParameters::KEY_GPS_PROCESSING_METHOD);
}
if( (valstr = params.get(TICameraParameters::KEY_GPS_MAPDATUM )) != NULL )
@@ -1018,41 +1014,41 @@ int CameraHal::setParameters(const CameraParameters& params)
mParameters.remove(TICameraParameters::KEY_ZOOM_BRACKETING_RANGE);
}
- if ((valstr = params.get(CameraParameters::KEY_ZOOM)) != NULL ) {
- if ((params.getInt(CameraParameters::KEY_ZOOM) >= 0 ) &&
- (params.getInt(CameraParameters::KEY_ZOOM) <= mMaxZoomSupported )) {
+ if ((valstr = params.get(android::CameraParameters::KEY_ZOOM)) != NULL ) {
+ if ((params.getInt(android::CameraParameters::KEY_ZOOM) >= 0 ) &&
+ (params.getInt(android::CameraParameters::KEY_ZOOM) <= mMaxZoomSupported )) {
CAMHAL_LOGDB("Zoom set %s", valstr);
doesSetParameterNeedUpdate(valstr,
- mParameters.get(CameraParameters::KEY_ZOOM),
+ mParameters.get(android::CameraParameters::KEY_ZOOM),
updateRequired);
- mParameters.set(CameraParameters::KEY_ZOOM, valstr);
+ mParameters.set(android::CameraParameters::KEY_ZOOM, valstr);
} else {
CAMHAL_LOGEB("ERROR: Invalid Zoom: %s", valstr);
return BAD_VALUE;
}
}
- if( (valstr = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK)) != NULL )
{
- CAMHAL_LOGDB("Auto Exposure Lock set %s", params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK));
+ CAMHAL_LOGDB("Auto Exposure Lock set %s", params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK));
doesSetParameterNeedUpdate(valstr,
- mParameters.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK),
+ mParameters.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK),
updateRequired);
- mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, valstr);
+ mParameters.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK, valstr);
}
- if( (valstr = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK)) != NULL )
{
- CAMHAL_LOGDB("Auto WhiteBalance Lock set %s", params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK));
+ CAMHAL_LOGDB("Auto WhiteBalance Lock set %s", params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK));
doesSetParameterNeedUpdate(valstr,
- mParameters.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK),
+ mParameters.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK),
updateRequired);
- mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, valstr);
+ mParameters.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, valstr);
}
- if( (valstr = params.get(CameraParameters::KEY_METERING_AREAS)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_METERING_AREAS)) != NULL )
{
- CAMHAL_LOGDB("Metering areas position set %s", params.get(CameraParameters::KEY_METERING_AREAS));
- mParameters.set(CameraParameters::KEY_METERING_AREAS, valstr);
+ CAMHAL_LOGDB("Metering areas position set %s", params.get(android::CameraParameters::KEY_METERING_AREAS));
+ mParameters.set(android::CameraParameters::KEY_METERING_AREAS, valstr);
}
if( (valstr = params.get(TICameraParameters::RAW_WIDTH)) != NULL ) {
@@ -1102,7 +1098,7 @@ int CameraHal::setParameters(const CameraParameters& params)
mParameters.set(TICameraParameters::KEY_ALGO_GIC, valstr);
}
- CameraParameters adapterParams = mParameters;
+ android::CameraParameters adapterParams = mParameters;
if( NULL != params.get(TICameraParameters::KEY_TEMP_BRACKETING_RANGE_POS) )
{
@@ -1126,7 +1122,7 @@ int CameraHal::setParameters(const CameraParameters& params)
CAMHAL_LOGDB("Negative bracketing range %d", mBracketRangeNegative);
if( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL) &&
- ( strcmp(valstr, CameraParameters::TRUE) == 0 )) {
+ ( strcmp(valstr, android::CameraParameters::TRUE) == 0 )) {
if ( !mBracketingEnabled ) {
CAMHAL_LOGDA("Enabling bracketing");
mBracketingEnabled = true;
@@ -1136,7 +1132,7 @@ int CameraHal::setParameters(const CameraParameters& params)
adapterParams.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
mParameters.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
} else if ( ( (valstr = params.get(TICameraParameters::KEY_TEMP_BRACKETING)) != NULL ) &&
- ( strcmp(valstr, CameraParameters::FALSE) == 0 )) {
+ ( strcmp(valstr, android::CameraParameters::FALSE) == 0 )) {
CAMHAL_LOGDA("Disabling bracketing");
adapterParams.set(TICameraParameters::KEY_TEMP_BRACKETING, valstr);
@@ -1175,7 +1171,7 @@ int CameraHal::setParameters(const CameraParameters& params)
}
if( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
- ( strcmp(valstr, CameraParameters::TRUE) == 0 ))
+ ( strcmp(valstr, android::CameraParameters::TRUE) == 0 ))
{
CAMHAL_LOGDA("Enabling shutter sound");
@@ -1184,7 +1180,7 @@ int CameraHal::setParameters(const CameraParameters& params)
mParameters.set(TICameraParameters::KEY_SHUTTER_ENABLE, valstr);
}
else if ( ( (valstr = params.get(TICameraParameters::KEY_SHUTTER_ENABLE)) != NULL ) &&
- ( strcmp(valstr, CameraParameters::FALSE) == 0 ))
+ ( strcmp(valstr, android::CameraParameters::FALSE) == 0 ))
{
CAMHAL_LOGDA("Disabling shutter sound");
@@ -1436,7 +1432,7 @@ status_t CameraHal::allocVideoBufs(uint32_t width, uint32_t height, uint32_t buf
if (buffers != NULL){
for (unsigned int i = 0; i< bufferCount; i++){
- GraphicBufferAllocator &GrallocAlloc = GraphicBufferAllocator::get();
+ android::GraphicBufferAllocator &GrallocAlloc = android::GraphicBufferAllocator::get();
buffer_handle_t handle;
ret = GrallocAlloc.alloc(width, height, HAL_PIXEL_FORMAT_NV12, CAMHAL_GRALLOC_USAGE, &handle, &stride);
if (ret != NO_ERROR){
@@ -1538,8 +1534,7 @@ status_t CameraHal::signalEndImageCapture()
{
status_t ret = NO_ERROR;
int w,h;
- CameraParameters adapterParams = mParameters;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME;
@@ -1601,7 +1596,7 @@ status_t CameraHal::freeVideoBufs(CameraBuffer *bufs)
return BAD_VALUE;
}
- GraphicBufferAllocator &GrallocAlloc = GraphicBufferAllocator::get();
+ android::GraphicBufferAllocator &GrallocAlloc = android::GraphicBufferAllocator::get();
for(int i = 0; i < count; i++){
CAMHAL_LOGVB("Free Video Gralloc Handle 0x%x", bufs[i].opaque);
@@ -2249,19 +2244,19 @@ status_t CameraHal::startRecording( )
// set internal recording hint in case camera adapter needs to make some
// decisions....(will only be sent to camera adapter if camera restart is required)
- mParameters.set(TICameraParameters::KEY_RECORDING_HINT, CameraParameters::TRUE);
+ mParameters.set(TICameraParameters::KEY_RECORDING_HINT, android::CameraParameters::TRUE);
// if application starts recording in continuous focus picture mode...
// then we need to force default capture mode (as opposed to video mode)
- if ( ((valstr = mParameters.get(CameraParameters::KEY_FOCUS_MODE)) != NULL) &&
- (strcmp(valstr, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0) ){
+ if ( ((valstr = mParameters.get(android::CameraParameters::KEY_FOCUS_MODE)) != NULL) &&
+ (strcmp(valstr, android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE) == 0) ){
restartPreviewRequired = resetVideoModeParameters();
}
// only need to check recording hint if preview restart is not already needed
- valstr = mParameters.get(CameraParameters::KEY_RECORDING_HINT);
+ valstr = mParameters.get(android::CameraParameters::KEY_RECORDING_HINT);
if ( !restartPreviewRequired &&
- (!valstr || (valstr && (strcmp(valstr, CameraParameters::TRUE) != 0))) ) {
+ (!valstr || (valstr && (strcmp(valstr, android::CameraParameters::TRUE) != 0))) ) {
restartPreviewRequired = setVideoModeParameters(mParameters);
}
@@ -2330,7 +2325,7 @@ status_t CameraHal::startRecording( )
@todo Modify the policies for enabling VSTAB & VNF usecase based later.
*/
-bool CameraHal::setVideoModeParameters(const CameraParameters& params)
+bool CameraHal::setVideoModeParameters(const android::CameraParameters& params)
{
const char *valstr = NULL;
bool restartPreviewRequired = false;
@@ -2348,29 +2343,29 @@ bool CameraHal::setVideoModeParameters(const CameraParameters& params)
}
// set VSTAB. restart is required if vstab value has changed
- if (params.get(CameraParameters::KEY_VIDEO_STABILIZATION) != NULL) {
+ if (params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION) != NULL) {
// make sure we support vstab
if (strcmp(mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED),
- CameraParameters::TRUE) == 0) {
- valstr = mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION);
+ android::CameraParameters::TRUE) == 0) {
+ valstr = mParameters.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
// vstab value has changed
if ((valstr != NULL) &&
- strcmp(valstr, params.get(CameraParameters::KEY_VIDEO_STABILIZATION)) != 0) {
+ strcmp(valstr, params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) != 0) {
restartPreviewRequired = true;
}
- mParameters.set(CameraParameters::KEY_VIDEO_STABILIZATION,
- params.get(CameraParameters::KEY_VIDEO_STABILIZATION));
+ mParameters.set(android::CameraParameters::KEY_VIDEO_STABILIZATION,
+ params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION));
}
- } else if (mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION)) {
+ } else if (mParameters.get(android::CameraParameters::KEY_VIDEO_STABILIZATION)) {
// vstab was configured but now unset
restartPreviewRequired = true;
- mParameters.remove(CameraParameters::KEY_VIDEO_STABILIZATION);
+ mParameters.remove(android::CameraParameters::KEY_VIDEO_STABILIZATION);
}
// Set VNF
if (params.get(TICameraParameters::KEY_VNF) == NULL) {
CAMHAL_LOGDA("Enable VNF");
- mParameters.set(TICameraParameters::KEY_VNF, CameraParameters::TRUE);
+ mParameters.set(TICameraParameters::KEY_VNF, android::CameraParameters::TRUE);
restartPreviewRequired = true;
} else {
valstr = mParameters.get(TICameraParameters::KEY_VNF);
@@ -2385,10 +2380,10 @@ bool CameraHal::setVideoModeParameters(const CameraParameters& params)
// So we are forcefully enabling VNF, if VSTAB is enabled for 1080p resolution.
int w, h;
params.getPreviewSize(&w, &h);
- valstr = mParameters.get(CameraParameters::KEY_VIDEO_STABILIZATION);
- if (valstr && (strcmp(valstr, CameraParameters::TRUE) == 0) && (w == 1920)) {
+ valstr = mParameters.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ if (valstr && (strcmp(valstr, android::CameraParameters::TRUE) == 0) && (w == 1920)) {
CAMHAL_LOGDA("Force Enable VNF for 1080p");
- mParameters.set(TICameraParameters::KEY_VNF, CameraParameters::TRUE);
+ mParameters.set(TICameraParameters::KEY_VNF, android::CameraParameters::TRUE);
restartPreviewRequired = true;
}
@@ -2452,7 +2447,7 @@ status_t CameraHal::restartPreview()
forceStopPreview();
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if (!mCapModeBackup.isEmpty()) {
mParameters.set(TICameraParameters::KEY_CAP_MODE, mCapModeBackup.string());
} else {
@@ -2481,7 +2476,7 @@ void CameraHal::stopRecording()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if (!mRecordingEnabled )
{
@@ -2581,7 +2576,7 @@ status_t CameraHal::autoFocus()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mMsgEnabled |= CAMERA_MSG_FOCUS;
@@ -2638,13 +2633,13 @@ status_t CameraHal::cancelAutoFocus()
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
- CameraParameters adapterParams = mParameters;
+ android::AutoMutex lock(mLock);
+ android::CameraParameters adapterParams = mParameters;
mMsgEnabled &= ~CAMERA_MSG_FOCUS;
if( NULL != mCameraAdapter )
{
- adapterParams.set(TICameraParameters::KEY_AUTO_FOCUS_LOCK, CameraParameters::FALSE);
+ adapterParams.set(TICameraParameters::KEY_AUTO_FOCUS_LOCK, android::CameraParameters::FALSE);
mCameraAdapter->setParameters(adapterParams);
mCameraAdapter->sendCommand(CameraAdapter::CAMERA_CANCEL_AUTOFOCUS);
mAppCallbackNotifier->flushEventQueue();
@@ -2831,7 +2826,7 @@ status_t CameraHal::stopImageBracketing()
*/
status_t CameraHal::takePicture(const char *params)
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
return __takePicture(params);
}
@@ -2905,39 +2900,39 @@ status_t CameraHal::__takePicture(const char *params)
// we don't have to parse through the whole set of parameters
// in camera adapter
if (strlen(params) > 0) {
- ShotParameters shotParams;
+ android::ShotParameters shotParams;
const char *valStr;
const char *valExpComp, *valExpGain;
int valNum;
- String8 shotParams8(params);
+ android::String8 shotParams8(params);
shotParams.unflatten(shotParams8);
mParameters.remove(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE);
mParameters.remove(TICameraParameters::KEY_EXP_BRACKETING_RANGE);
- valExpGain = shotParams.get(ShotParameters::KEY_EXP_GAIN_PAIRS);
- valExpComp = shotParams.get(ShotParameters::KEY_EXP_COMPENSATION);
+ valExpGain = shotParams.get(android::ShotParameters::KEY_EXP_GAIN_PAIRS);
+ valExpComp = shotParams.get(android::ShotParameters::KEY_EXP_COMPENSATION);
if (NULL != valExpComp) {
mParameters.set(TICameraParameters::KEY_EXP_BRACKETING_RANGE, valExpComp);
} else if (NULL != valExpGain) {
mParameters.set(TICameraParameters::KEY_EXP_GAIN_BRACKETING_RANGE, valExpGain);
}
- valNum = shotParams.getInt(ShotParameters::KEY_BURST);
+ valNum = shotParams.getInt(android::ShotParameters::KEY_BURST);
if (valNum >= 0) {
mParameters.set(TICameraParameters::KEY_BURST, valNum);
burst = valNum;
}
- valStr = shotParams.get(ShotParameters::KEY_FLUSH_CONFIG);
+ valStr = shotParams.get(android::ShotParameters::KEY_FLUSH_CONFIG);
if (valStr!= NULL) {
- if ( 0 == strcmp(valStr, ShotParameters::TRUE) ) {
+ if ( 0 == strcmp(valStr, android::ShotParameters::TRUE) ) {
mParameters.set(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE,
- CameraParameters::TRUE);
- } else if ( 0 == strcmp(valStr, ShotParameters::FALSE) ) {
+ android::CameraParameters::TRUE);
+ } else if ( 0 == strcmp(valStr, android::ShotParameters::FALSE) ) {
mParameters.set(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE,
- CameraParameters::FALSE);
+ android::CameraParameters::FALSE);
}
}
@@ -3071,7 +3066,7 @@ status_t CameraHal::__takePicture(const char *params)
CAMHAL_LOGDB("Raw capture buffers setup - %s", mParameters.getPictureFormat());
ret = allocRawBufs(mParameters.getInt(TICameraParameters::RAW_WIDTH),
mParameters.getInt(TICameraParameters::RAW_HEIGHT),
- CameraParameters::PIXEL_FORMAT_BAYER_RGGB,
+ android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB,
rawBufferCount);
if ( NO_ERROR != ret ) {
@@ -3143,7 +3138,7 @@ status_t CameraHal::cancelPicture( )
*/
char* CameraHal::getParameters()
{
- String8 params_str8;
+ android::String8 params_str8;
char* params_string;
const char * valstr = NULL;
@@ -3156,33 +3151,33 @@ char* CameraHal::getParameters()
if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT)) != NULL ) {
if (!strcmp(TICameraParameters::S3D_TB_FULL, valstr)) {
- mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
} else if (!strcmp(TICameraParameters::S3D_SS_FULL, valstr)) {
- mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
} else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, valstr))
|| (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, valstr))) {
- mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
}
}
if ( (valstr = mParameters.get(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT)) != NULL ) {
if (!strcmp(TICameraParameters::S3D_TB_FULL, valstr)) {
- mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
} else if (!strcmp(TICameraParameters::S3D_SS_FULL, valstr)) {
- mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
} else if ((!strcmp(TICameraParameters::S3D_TB_SUBSAMPLED, valstr))
|| (!strcmp(TICameraParameters::S3D_SS_SUBSAMPLED, valstr))) {
- mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
+ mParameters.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mParameters.get(TICameraParameters::KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
}
}
- CameraParameters mParams = mParameters;
+ android::CameraParameters mParams = mParameters;
// Handle RECORDING_HINT to Set/Reset Video Mode Parameters
- valstr = mParameters.get(CameraParameters::KEY_RECORDING_HINT);
+ valstr = mParameters.get(android::CameraParameters::KEY_RECORDING_HINT);
if(valstr != NULL)
{
- if(strcmp(valstr, CameraParameters::TRUE) == 0)
+ if(strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
//HACK FOR MMS MODE
resetPreviewRes(&mParams);
@@ -3217,9 +3212,9 @@ status_t CameraHal::reprocess(const char *params)
int bufferCount = 0;
CameraAdapter::BuffersDescriptor desc;
CameraBuffer *reprocBuffers = NULL;
- ShotParameters shotParams;
+ android::ShotParameters shotParams;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
LOG_FUNCTION_NAME;
@@ -3893,37 +3888,37 @@ void CameraHal::insertSupportedParams()
{
LOG_FUNCTION_NAME;
- CameraParameters &p = mParameters;
+ android::CameraParameters &p = mParameters;
///Set the name of the camera
p.set(TICameraParameters::KEY_CAMERA_NAME, mCameraProperties->get(CameraProperties::CAMERA_NAME));
mMaxZoomSupported = atoi(mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
- p.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
- p.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS));
- p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
- p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_FORMATS));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIZES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FORMATS));
p.set(TICameraParameters::KEY_SUPPORTED_PICTURE_SUBSAMPLED_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SUBSAMPLED_SIZES));
p.set(TICameraParameters::KEY_SUPPORTED_PICTURE_SIDEBYSIDE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_SIDEBYSIDE_SIZES));
p.set(TICameraParameters::KEY_SUPPORTED_PICTURE_TOPBOTTOM_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PICTURE_TOPBOTTOM_SIZES));
p.set(TICameraParameters::KEY_SUPPORTED_PREVIEW_SUBSAMPLED_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SUBSAMPLED_SIZES));
p.set(TICameraParameters::KEY_SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_SIDEBYSIDE_SIZES));
p.set(TICameraParameters::KEY_SUPPORTED_PREVIEW_TOPBOTTOM_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_TOPBOTTOM_SIZES));
- p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
p.set(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED, mCameraProperties->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT));
- p.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
+ p.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
p.set(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE_EXT_SUPPORTED));
- p.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_THUMBNAIL_SIZES));
- p.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE));
- p.set(CameraParameters::KEY_SUPPORTED_EFFECTS, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS));
- p.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
- p.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES));
- p.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES));
- p.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING));
- p.set(CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MAX));
- p.set(CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MIN));
- p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_EV_STEP));
+ p.set(android::CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES, mCameraProperties->get(CameraProperties::SUPPORTED_THUMBNAIL_SIZES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_WHITE_BALANCE, mCameraProperties->get(CameraProperties::SUPPORTED_WHITE_BALANCE));
+ p.set(android::CameraParameters::KEY_SUPPORTED_EFFECTS, mCameraProperties->get(CameraProperties::SUPPORTED_EFFECTS));
+ p.set(android::CameraParameters::KEY_SUPPORTED_SCENE_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_SCENE_MODES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_FLASH_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FLASH_MODES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_FOCUS_MODES, mCameraProperties->get(CameraProperties::SUPPORTED_FOCUS_MODES));
+ p.set(android::CameraParameters::KEY_SUPPORTED_ANTIBANDING, mCameraProperties->get(CameraProperties::SUPPORTED_ANTIBANDING));
+ p.set(android::CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MAX));
+ p.set(android::CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::SUPPORTED_EV_MIN));
+ p.set(android::CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_EV_STEP));
p.set(TICameraParameters::KEY_SUPPORTED_EXPOSURE, mCameraProperties->get(CameraProperties::SUPPORTED_EXPOSURE_MODES));
p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MIN, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MIN));
p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_EXPOSURE_MAX, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_EXPOSURE_MAX));
@@ -3932,10 +3927,10 @@ void CameraHal::insertSupportedParams()
p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_MAX, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_MAX));
p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_GAIN_ISO_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_GAIN_ISO_STEP));
p.set(TICameraParameters::KEY_SUPPORTED_ISO_VALUES, mCameraProperties->get(CameraProperties::SUPPORTED_ISO_VALUES));
- p.set(CameraParameters::KEY_ZOOM_RATIOS, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_RATIOS));
- p.set(CameraParameters::KEY_MAX_ZOOM, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
- p.set(CameraParameters::KEY_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::ZOOM_SUPPORTED));
- p.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::SMOOTH_ZOOM_SUPPORTED));
+ p.set(android::CameraParameters::KEY_ZOOM_RATIOS, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_RATIOS));
+ p.set(android::CameraParameters::KEY_MAX_ZOOM, mCameraProperties->get(CameraProperties::SUPPORTED_ZOOM_STAGES));
+ p.set(android::CameraParameters::KEY_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::ZOOM_SUPPORTED));
+ p.set(android::CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, mCameraProperties->get(CameraProperties::SMOOTH_ZOOM_SUPPORTED));
p.set(TICameraParameters::KEY_SUPPORTED_IPP, mCameraProperties->get(CameraProperties::SUPPORTED_IPP_MODES));
p.set(TICameraParameters::KEY_S3D_PRV_FRAME_LAYOUT_VALUES, mCameraProperties->get(CameraProperties::S3D_PRV_FRAME_LAYOUT_VALUES));
p.set(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT_VALUES, mCameraProperties->get(CameraProperties::S3D_CAP_FRAME_LAYOUT_VALUES));
@@ -3943,11 +3938,11 @@ void CameraHal::insertSupportedParams()
p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MIN, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MIN));
p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_MAX, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_MAX));
p.set(TICameraParameters::KEY_SUPPORTED_MANUAL_CONVERGENCE_STEP, mCameraProperties->get(CameraProperties::SUPPORTED_MANUAL_CONVERGENCE_STEP));
- p.set(CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED));
+ p.set(android::CameraParameters::KEY_VIDEO_STABILIZATION_SUPPORTED, mCameraProperties->get(CameraProperties::VSTAB_SUPPORTED));
p.set(TICameraParameters::KEY_VNF_SUPPORTED, mCameraProperties->get(CameraProperties::VNF_SUPPORTED));
- p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED));
- p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED));
- p.set(CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, mCameraProperties->get(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED));
+ p.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED));
+ p.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED));
+ p.set(android::CameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED, mCameraProperties->get(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED));
p.set(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED, mCameraProperties->get(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED));
p.set(TICameraParameters::KEY_CAP_MODE_VALUES, mCameraProperties->get(CameraProperties::CAP_MODE_VALUES));
@@ -3960,7 +3955,7 @@ void CameraHal::initDefaultParameters()
//Purpose of this function is to initialize the default current and supported parameters for the currently
//selected camera.
- CameraParameters &p = mParameters;
+ android::CameraParameters &p = mParameters;
int currentRevision, adapterRevision;
status_t ret = NO_ERROR;
int width, height;
@@ -3996,29 +3991,29 @@ void CameraHal::initDefaultParameters()
if ( NO_ERROR == ret )
{
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
}
else
{
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, MIN_WIDTH);
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, MIN_HEIGHT);
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, MIN_WIDTH);
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, MIN_HEIGHT);
}
//Insert default values
p.setPreviewFrameRate(atoi(mCameraProperties->get(CameraProperties::PREVIEW_FRAME_RATE)));
- p.set(CameraParameters::KEY_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
+ p.set(android::CameraParameters::KEY_PREVIEW_FPS_RANGE, mCameraProperties->get(CameraProperties::FRAMERATE_RANGE));
p.setPreviewFormat(mCameraProperties->get(CameraProperties::PREVIEW_FORMAT));
p.setPictureFormat(mCameraProperties->get(CameraProperties::PICTURE_FORMAT));
- p.set(CameraParameters::KEY_JPEG_QUALITY, mCameraProperties->get(CameraProperties::JPEG_QUALITY));
- p.set(CameraParameters::KEY_WHITE_BALANCE, mCameraProperties->get(CameraProperties::WHITEBALANCE));
- p.set(CameraParameters::KEY_EFFECT, mCameraProperties->get(CameraProperties::EFFECT));
- p.set(CameraParameters::KEY_ANTIBANDING, mCameraProperties->get(CameraProperties::ANTIBANDING));
- p.set(CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
- p.set(CameraParameters::KEY_FOCUS_MODE, mCameraProperties->get(CameraProperties::FOCUS_MODE));
- p.set(CameraParameters::KEY_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::EV_COMPENSATION));
- p.set(CameraParameters::KEY_SCENE_MODE, mCameraProperties->get(CameraProperties::SCENE_MODE));
- p.set(CameraParameters::KEY_ZOOM, mCameraProperties->get(CameraProperties::ZOOM));
+ p.set(android::CameraParameters::KEY_JPEG_QUALITY, mCameraProperties->get(CameraProperties::JPEG_QUALITY));
+ p.set(android::CameraParameters::KEY_WHITE_BALANCE, mCameraProperties->get(CameraProperties::WHITEBALANCE));
+ p.set(android::CameraParameters::KEY_EFFECT, mCameraProperties->get(CameraProperties::EFFECT));
+ p.set(android::CameraParameters::KEY_ANTIBANDING, mCameraProperties->get(CameraProperties::ANTIBANDING));
+ p.set(android::CameraParameters::KEY_FLASH_MODE, mCameraProperties->get(CameraProperties::FLASH_MODE));
+ p.set(android::CameraParameters::KEY_FOCUS_MODE, mCameraProperties->get(CameraProperties::FOCUS_MODE));
+ p.set(android::CameraParameters::KEY_EXPOSURE_COMPENSATION, mCameraProperties->get(CameraProperties::EV_COMPENSATION));
+ p.set(android::CameraParameters::KEY_SCENE_MODE, mCameraProperties->get(CameraProperties::SCENE_MODE));
+ p.set(android::CameraParameters::KEY_ZOOM, mCameraProperties->get(CameraProperties::ZOOM));
p.set(TICameraParameters::KEY_CONTRAST, mCameraProperties->get(CameraProperties::CONTRAST));
p.set(TICameraParameters::KEY_SATURATION, mCameraProperties->get(CameraProperties::SATURATION));
p.set(TICameraParameters::KEY_BRIGHTNESS, mCameraProperties->get(CameraProperties::BRIGHTNESS));
@@ -4038,36 +4033,36 @@ void CameraHal::initDefaultParameters()
p.set(TICameraParameters::KEY_S3D_CAP_FRAME_LAYOUT, mCameraProperties->get(CameraProperties::S3D_CAP_FRAME_LAYOUT));
p.set(TICameraParameters::KEY_AUTOCONVERGENCE_MODE, mCameraProperties->get(CameraProperties::AUTOCONVERGENCE_MODE));
p.set(TICameraParameters::KEY_MANUAL_CONVERGENCE, mCameraProperties->get(CameraProperties::MANUAL_CONVERGENCE));
- p.set(CameraParameters::KEY_VIDEO_STABILIZATION, mCameraProperties->get(CameraProperties::VSTAB));
+ p.set(android::CameraParameters::KEY_VIDEO_STABILIZATION, mCameraProperties->get(CameraProperties::VSTAB));
p.set(TICameraParameters::KEY_VNF, mCameraProperties->get(CameraProperties::VNF));
- p.set(CameraParameters::KEY_FOCAL_LENGTH, mCameraProperties->get(CameraProperties::FOCAL_LENGTH));
- p.set(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::HOR_ANGLE));
- p.set(CameraParameters::KEY_VERTICAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::VER_ANGLE));
+ p.set(android::CameraParameters::KEY_FOCAL_LENGTH, mCameraProperties->get(CameraProperties::FOCAL_LENGTH));
+ p.set(android::CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::HOR_ANGLE));
+ p.set(android::CameraParameters::KEY_VERTICAL_VIEW_ANGLE, mCameraProperties->get(CameraProperties::VER_ANGLE));
p.set(TICameraParameters::KEY_SENSOR_ORIENTATION, mCameraProperties->get(CameraProperties::SENSOR_ORIENTATION));
p.set(TICameraParameters::KEY_EXIF_MAKE, mCameraProperties->get(CameraProperties::EXIF_MAKE));
p.set(TICameraParameters::KEY_EXIF_MODEL, mCameraProperties->get(CameraProperties::EXIF_MODEL));
- p.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_QUALITY));
- p.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar");
- p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, mCameraProperties->get(CameraProperties::MAX_FD_HW_FACES));
- p.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, mCameraProperties->get(CameraProperties::MAX_FD_SW_FACES));
+ p.set(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, mCameraProperties->get(CameraProperties::JPEG_THUMBNAIL_QUALITY));
+ p.set(android::CameraParameters::KEY_VIDEO_FRAME_FORMAT, "OMX_TI_COLOR_FormatYUV420PackedSemiPlanar");
+ p.set(android::CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, mCameraProperties->get(CameraProperties::MAX_FD_HW_FACES));
+ p.set(android::CameraParameters::KEY_MAX_NUM_DETECTED_FACES_SW, mCameraProperties->get(CameraProperties::MAX_FD_SW_FACES));
p.set(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION, mCameraProperties->get(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION));
// Only one area a.k.a Touch AF for now.
// TODO: Add support for multiple focus areas.
- p.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, mCameraProperties->get(CameraProperties::MAX_FOCUS_AREAS));
- p.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK));
- p.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK));
- p.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, mCameraProperties->get(CameraProperties::MAX_NUM_METERING_AREAS));
+ p.set(android::CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, mCameraProperties->get(CameraProperties::MAX_FOCUS_AREAS));
+ p.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK, mCameraProperties->get(CameraProperties::AUTO_EXPOSURE_LOCK));
+ p.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, mCameraProperties->get(CameraProperties::AUTO_WHITEBALANCE_LOCK));
+ p.set(android::CameraParameters::KEY_MAX_NUM_METERING_AREAS, mCameraProperties->get(CameraProperties::MAX_NUM_METERING_AREAS));
p.set(TICameraParameters::RAW_WIDTH, mCameraProperties->get(CameraProperties::RAW_WIDTH));
p.set(TICameraParameters::RAW_HEIGHT,mCameraProperties->get(CameraProperties::RAW_HEIGHT));
// TI extensions for enable/disable algos
// Hardcoded for now
- p.set(TICameraParameters::KEY_ALGO_FIXED_GAMMA, CameraParameters::TRUE);
- p.set(TICameraParameters::KEY_ALGO_NSF1, CameraParameters::TRUE);
- p.set(TICameraParameters::KEY_ALGO_NSF2, CameraParameters::TRUE);
- p.set(TICameraParameters::KEY_ALGO_SHARPENING, CameraParameters::TRUE);
- p.set(TICameraParameters::KEY_ALGO_THREELINCOLORMAP, CameraParameters::TRUE);
- p.set(TICameraParameters::KEY_ALGO_GIC, CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_FIXED_GAMMA, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_NSF1, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_NSF2, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_SHARPENING, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_THREELINCOLORMAP, android::CameraParameters::TRUE);
+ p.set(TICameraParameters::KEY_ALGO_GIC, android::CameraParameters::TRUE);
LOG_FUNCTION_NAME_EXIT;
}
@@ -4180,7 +4175,7 @@ void CameraHal::getPreferredPreviewRes(int *width, int *height)
LOG_FUNCTION_NAME_EXIT;
}
-void CameraHal::resetPreviewRes(CameraParameters *params)
+void CameraHal::resetPreviewRes(android::CameraParameters *params)
{
LOG_FUNCTION_NAME;
@@ -4208,4 +4203,5 @@ camera_buffer_get_omx_ptr (CameraBuffer *buffer)
}
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraHalCommon.cpp b/camera/CameraHalCommon.cpp
index ea97e84..051a785 100644
--- a/camera/CameraHalCommon.cpp
+++ b/camera/CameraHalCommon.cpp
@@ -16,7 +16,8 @@
#include "CameraHal.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const char CameraHal::PARAMS_DELIMITER []= ",";
@@ -116,6 +117,5 @@ void CameraHal::PPM(const char* str, struct timeval* ppm_first, ...){
#endif
-};
-
-
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraHalUtilClasses.cpp b/camera/CameraHalUtilClasses.cpp
index 7b380f9..53c9a55 100644
--- a/camera/CameraHalUtilClasses.cpp
+++ b/camera/CameraHalUtilClasses.cpp
@@ -23,7 +23,8 @@
#include "CameraHal.h"
-namespace android {
+namespace Ti {
+namespace Camera {
/*--------------------FrameProvider Class STARTS here-----------------------------*/
@@ -196,7 +197,7 @@ status_t CameraArea::checkArea(ssize_t top,
status_t CameraArea::parseAreas(const char *area,
size_t areaLength,
- Vector< sp<CameraArea> > &areas)
+ android::Vector<android::sp<CameraArea> > &areas)
{
status_t ret = NO_ERROR;
char *ctx;
@@ -208,7 +209,7 @@ status_t CameraArea::parseAreas(const char *area,
const char sep = ',';
ssize_t top, left, bottom, right, weight;
char *tmpBuffer = NULL;
- sp<CameraArea> currentArea;
+ android::sp<CameraArea> currentArea;
LOG_FUNCTION_NAME
@@ -331,8 +332,8 @@ status_t CameraArea::parseAreas(const char *area,
return ret;
}
-bool CameraArea::areAreasDifferent(Vector< sp<CameraArea> > &area1,
- Vector< sp<CameraArea> > &area2) {
+bool CameraArea::areAreasDifferent(android::Vector< android::sp<CameraArea> > &area1,
+ android::Vector< android::sp<CameraArea> > &area2) {
if (area1.size() != area2.size()) {
return true;
}
@@ -347,7 +348,7 @@ bool CameraArea::areAreasDifferent(Vector< sp<CameraArea> > &area1,
return false;
}
-bool CameraArea::compare(const sp<CameraArea> &area) {
+bool CameraArea::compare(const android::sp<CameraArea> &area) {
return ((mTop == area->mTop) && (mLeft == area->mLeft) &&
(mBottom == area->mBottom) && (mRight == area->mRight) &&
(mWeight == area->mWeight));
@@ -356,4 +357,5 @@ bool CameraArea::compare(const sp<CameraArea> &area) {
/*--------------------CameraArea Class ENDS here-----------------------------*/
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraHal_Module.cpp b/camera/CameraHal_Module.cpp
index eb4a475..2d2e10b 100644
--- a/camera/CameraHal_Module.cpp
+++ b/camera/CameraHal_Module.cpp
@@ -35,8 +35,11 @@
#endif
-static android::CameraProperties gCameraProperties;
-static android::CameraHal* gCameraHals[MAX_CAMERAS_SUPPORTED];
+namespace Ti {
+namespace Camera {
+
+static CameraProperties gCameraProperties;
+static CameraHal* gCameraHals[MAX_CAMERAS_SUPPORTED];
static unsigned int gCamerasOpen = 0;
static android::Mutex gCameraHalDeviceLock;
@@ -50,6 +53,10 @@ static struct hw_module_methods_t camera_module_methods = {
open: camera_device_open
};
+} // namespace Camera
+} // namespace Ti
+
+
camera_module_t HAL_MODULE_INFO_SYM = {
common: {
tag: HARDWARE_MODULE_TAG,
@@ -58,14 +65,18 @@ camera_module_t HAL_MODULE_INFO_SYM = {
id: CAMERA_HARDWARE_MODULE_ID,
name: "TI OMAP CameraHal Module",
author: "TI",
- methods: &camera_module_methods,
+ methods: &Ti::Camera::camera_module_methods,
dso: NULL, /* remove compilation warnings */
reserved: {0}, /* remove compilation warnings */
},
- get_number_of_cameras: camera_get_number_of_cameras,
- get_camera_info: camera_get_camera_info,
+ get_number_of_cameras: Ti::Camera::camera_get_number_of_cameras,
+ get_camera_info: Ti::Camera::camera_get_camera_info,
};
+
+namespace Ti {
+namespace Camera {
+
typedef struct ti_camera_device {
camera_device_t base;
/* TI specific "private" data can go here (base.priv) */
@@ -511,7 +522,7 @@ int camera_device_close(hw_device_t* device)
int ret = 0;
ti_camera_device_t* ti_dev = NULL;
- android::Mutex::Autolock lock(gCameraHalDeviceLock);
+ android::AutoMutex lock(gCameraHalDeviceLock);
if (!device) {
ret = -EINVAL;
@@ -557,10 +568,10 @@ int camera_device_open(const hw_module_t* module, const char* name,
int cameraid;
ti_camera_device_t* camera_device = NULL;
camera_device_ops_t* camera_ops = NULL;
- android::CameraHal* camera = NULL;
- android::CameraProperties::Properties* properties = NULL;
+ CameraHal* camera = NULL;
+ CameraProperties::Properties* properties = NULL;
- android::Mutex::Autolock lock(gCameraHalDeviceLock);
+ android::AutoMutex lock(gCameraHalDeviceLock);
CAMHAL_LOGI("camera_device open");
@@ -653,7 +664,7 @@ int camera_device_open(const hw_module_t* module, const char* name,
goto fail;
}
- camera = new android::CameraHal(cameraid);
+ camera = new CameraHal(cameraid);
if(!camera)
{
@@ -662,7 +673,7 @@ int camera_device_open(const hw_module_t* module, const char* name,
goto fail;
}
- if(properties && (camera->initialize(properties) != android::NO_ERROR))
+ if(properties && (camera->initialize(properties) != NO_ERROR))
{
CAMHAL_LOGE("Couldn't initialize camera instance");
rv = -ENODEV;
@@ -698,7 +709,7 @@ int camera_get_number_of_cameras(void)
// this going to be the first call from camera service
// initialize camera properties here...
- if(gCameraProperties.initialize() != android::NO_ERROR)
+ if(gCameraProperties.initialize() != NO_ERROR)
{
CAMHAL_LOGEA("Unable to create or initialize CameraProperties");
return NULL;
@@ -715,11 +726,11 @@ int camera_get_camera_info(int camera_id, struct camera_info *info)
int face_value = CAMERA_FACING_BACK;
int orientation = 0;
const char *valstr = NULL;
- android::CameraProperties::Properties* properties = NULL;
+ CameraProperties::Properties* properties = NULL;
// this going to be the first call from camera service
// initialize camera properties here...
- if(gCameraProperties.initialize() != android::NO_ERROR)
+ if(gCameraProperties.initialize() != NO_ERROR)
{
CAMHAL_LOGEA("Unable to create or initialize CameraProperties");
rv = -EINVAL;
@@ -736,20 +747,20 @@ int camera_get_camera_info(int camera_id, struct camera_info *info)
if(properties)
{
- valstr = properties->get(android::CameraProperties::FACING_INDEX);
+ valstr = properties->get(CameraProperties::FACING_INDEX);
if(valstr != NULL)
{
- if (strcmp(valstr, (const char *) android::TICameraParameters::FACING_FRONT) == 0)
+ if (strcmp(valstr, TICameraParameters::FACING_FRONT) == 0)
{
face_value = CAMERA_FACING_FRONT;
}
- else if (strcmp(valstr, (const char *) android::TICameraParameters::FACING_BACK) == 0)
+ else if (strcmp(valstr, TICameraParameters::FACING_BACK) == 0)
{
face_value = CAMERA_FACING_BACK;
}
}
- valstr = properties->get(android::CameraProperties::ORIENTATION_INDEX);
+ valstr = properties->get(CameraProperties::ORIENTATION_INDEX);
if(valstr != NULL)
{
orientation = atoi(valstr);
@@ -766,3 +777,7 @@ int camera_get_camera_info(int camera_id, struct camera_info *info)
end:
return rv;
}
+
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraParameters.cpp b/camera/CameraParameters.cpp
index a809115..82b1da4 100644
--- a/camera/CameraParameters.cpp
+++ b/camera/CameraParameters.cpp
@@ -23,7 +23,8 @@
#include "CameraProperties.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const char CameraProperties::INVALID[]="prop-invalid-key";
const char CameraProperties::CAMERA_NAME[]="prop-camera-name";
@@ -166,9 +167,9 @@ void CameraProperties::Properties::set(const char * const prop, const char * con
CAMHAL_ASSERT(prop);
if ( !value ) {
- mProperties[mCurrentMode].removeItem(String8(prop));
+ mProperties[mCurrentMode].removeItem(android::String8(prop));
} else {
- mProperties[mCurrentMode].replaceValueFor(String8(prop), String8(value));
+ mProperties[mCurrentMode].replaceValueFor(android::String8(prop), android::String8(value));
}
}
@@ -179,11 +180,11 @@ void CameraProperties::Properties::set(const char * const prop, const int value)
}
const char* CameraProperties::Properties::get(const char * prop) const {
- return mProperties[mCurrentMode].valueFor(String8(prop)).string();
+ return mProperties[mCurrentMode].valueFor(android::String8(prop)).string();
}
int CameraProperties::Properties::getInt(const char * prop) const {
- String8 value = mProperties[mCurrentMode].valueFor(String8(prop));
+ android::String8 value = mProperties[mCurrentMode].valueFor(android::String8(prop));
if (value.isEmpty()) {
return -1;
}
@@ -235,4 +236,5 @@ const char* CameraProperties::Properties::valueAt(const unsigned int index) cons
return NULL;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/CameraProperties.cpp b/camera/CameraProperties.cpp
index c0a1934..93bc953 100644
--- a/camera/CameraProperties.cpp
+++ b/camera/CameraProperties.cpp
@@ -26,7 +26,8 @@
#define CAMERA_ROOT "CameraRoot"
#define CAMERA_INSTANCE "CameraInstance"
-namespace android {
+namespace Ti {
+namespace Camera {
// lower entries have higher priority
static const char* g_camera_adapters[] = {
@@ -67,7 +68,7 @@ status_t CameraProperties::initialize()
status_t ret;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if(mInitialized)
return NO_ERROR;
@@ -130,4 +131,5 @@ int CameraProperties::camerasSupported()
return mCamerasSupported;
}
-};
+} // namespace Camera
+} // namespace Ti
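Once this code leaves namespace android, the unqualified Mutex::Autolock used before no longer resolves, so the patch switches these sites to the android::AutoMutex typedef, which is the same RAII scoped lock. A minimal sketch of the resulting idiom, assuming only the AOSP utils/Mutex.h and utils/Errors.h typedefs; the class around mLock is illustrative:

    #include <utils/Mutex.h>
    #include <utils/Errors.h>

    namespace Ti {
    namespace Camera {

    class PropertiesSketch {
    public:
        PropertiesSketch() : mInitialized(false) {}

        android::status_t initialize() {
            // android::AutoMutex == android::Mutex::Autolock: locks on construction,
            // unlocks automatically when 'lock' leaves scope.
            android::AutoMutex lock(mLock);
            if (mInitialized) {
                return android::NO_ERROR;
            }
            // ... enumerate adapters and fill the property tables ...
            mInitialized = true;
            return android::NO_ERROR;
        }

    private:
        android::Mutex mLock;
        bool mInitialized;
    };

    } // namespace Camera
    } // namespace Ti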
diff --git a/camera/Encoder_libjpeg.cpp b/camera/Encoder_libjpeg.cpp
index 6db9e7e..2164b3d 100644
--- a/camera/Encoder_libjpeg.cpp
+++ b/camera/Encoder_libjpeg.cpp
@@ -44,7 +44,9 @@ extern "C" {
#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
#define MIN(x,y) ((x < y) ? x : y)
-namespace android {
+namespace Ti {
+namespace Camera {
+
struct integer_string_pair {
unsigned int integer;
const char* string;
@@ -444,14 +446,14 @@ size_t Encoder_libjpeg::encode(params* input) {
goto exit;
}
- if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
bpp = 1;
if ((in_width != out_width) || (in_height != out_height)) {
resize_src = (uint8_t*) malloc(input->dst_size);
resize_nv12(input, resize_src);
if (resize_src) src = resize_src;
}
- } else if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV422I) &&
+ } else if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV422I) &&
strcmp(input->format, TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY)) {
// we currently only support yuv422i and yuv420sp
CAMHAL_LOGEB("Encoder: format not supported: %s", input->format);
@@ -496,11 +498,11 @@ size_t Encoder_libjpeg::encode(params* input) {
JSAMPROW row[1]; /* pointer to JSAMPLE row[s] */
// convert input yuv format to yuv444
- if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
nv21_to_yuv(row_tmp, row_src, row_uv, out_width - right_crop);
} else if (strcmp(input->format, TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY) == 0) {
uyvy_to_yuv(row_tmp, (uint32_t*)row_src, out_width - right_crop);
- } else if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ } else if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
yuyv_to_yuv(row_tmp, (uint32_t*)row_src, out_width - right_crop);
}
@@ -509,7 +511,7 @@ size_t Encoder_libjpeg::encode(params* input) {
row_src = row_src + out_width*bpp;
// move uv row if input format needs it
- if (strcmp(input->format, CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
if (!(cinfo.next_scanline % 2))
row_uv = row_uv + out_width * bpp;
}
@@ -529,4 +531,5 @@ size_t Encoder_libjpeg::encode(params* input) {
return dest_mgr.jpegsize;
}
-} // namespace android
+} // namespace Camera
+} // namespace Ti
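The encoder hunks touch only the format comparisons: the standard pixel-format strings are now spelled android::CameraParameters::PIXEL_FORMAT_*, while the TI-specific UYVY constant stays in TICameraParameters. A condensed sketch of that dispatch as it reads after the patch, with the negated checks rewritten as positive matches for readability (buffer setup and the conversion loops are elided):

    // Only NV12/NV21-style 4:2:0 semi-planar and the two 4:2:2 interleaved
    // layouts are accepted; anything else is rejected up front.
    if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
        // 4:2:0 semi-planar: one luma byte per pixel, chroma advanced every other row
    } else if (strcmp(input->format, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0 ||
               strcmp(input->format, TICameraParameters::PIXEL_FORMAT_YUV422I_UYVY) == 0) {
        // 4:2:2 interleaved (YUYV or UYVY), expanded row by row to YUV444 for libjpeg
    } else {
        CAMHAL_LOGEB("Encoder: format not supported: %s", input->format);
    }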
diff --git a/camera/MemoryManager.cpp b/camera/MemoryManager.cpp
index f9c12c1..d466f52 100644
--- a/camera/MemoryManager.cpp
+++ b/camera/MemoryManager.cpp
@@ -25,7 +25,8 @@ extern "C" {
};
-namespace android {
+namespace Ti {
+namespace Camera {
///@todo Move these constants to a common header file, preferably in tiler.h
#define STRIDE_8BIT (4 * 1024)
@@ -223,7 +224,8 @@ status_t MemoryManager::setErrorHandler(ErrorNotifier *errorNotifier)
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
/*--------------------MemoryManager Class ENDS here-----------------------------*/
diff --git a/camera/OMXCameraAdapter/OMX3A.cpp b/camera/OMXCameraAdapter/OMX3A.cpp
index 79e6aac..2a0b111 100644
--- a/camera/OMXCameraAdapter/OMX3A.cpp
+++ b/camera/OMXCameraAdapter/OMX3A.cpp
@@ -31,7 +31,9 @@
static const char PARAM_SEP[] = ",";
-namespace android {
+namespace Ti {
+namespace Camera {
+
const SceneModesEntry* OMXCameraAdapter::getSceneModeEntry(const char* name,
OMX_SCENEMODETYPE scene) {
const SceneModesEntry* cameraLUT = NULL;
@@ -62,7 +64,7 @@ const SceneModesEntry* OMXCameraAdapter::getSceneModeEntry(const char* name,
return entry;
}
-status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
+status_t OMXCameraAdapter::setParameters3A(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -73,9 +75,9 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(m3ASettingsUpdateLock);
+ android::AutoMutex lock(m3ASettingsUpdateLock);
- str = params.get(CameraParameters::KEY_SCENE_MODE);
+ str = params.get(android::CameraParameters::KEY_SCENE_MODE);
mode = getLUTvalue_HALtoOMX( str, SceneLUT);
if ( mFirstTimeInit || ((str != NULL) && ( mParameters3A.SceneMode != mode )) ) {
if ( 0 <= mode ) {
@@ -148,7 +150,7 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
- str = params.get(CameraParameters::KEY_WHITE_BALANCE);
+ str = params.get(android::CameraParameters::KEY_WHITE_BALANCE);
mode = getLUTvalue_HALtoOMX( str, WBalLUT);
if (mFirstTimeInit || ((str != NULL) && (mode != mParameters3A.WhiteBallance)))
{
@@ -208,7 +210,7 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
- str = params.get(CameraParameters::KEY_ANTIBANDING);
+ str = params.get(android::CameraParameters::KEY_ANTIBANDING);
mode = getLUTvalue_HALtoOMX(str,FlickerLUT);
if ( mFirstTimeInit || ( ( str != NULL ) && ( mParameters3A.Flicker != mode ) ))
{
@@ -233,7 +235,7 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
- str = params.get(CameraParameters::KEY_FOCUS_MODE);
+ str = params.get(android::CameraParameters::KEY_FOCUS_MODE);
mode = getLUTvalue_HALtoOMX(str, FocusLUT);
if ( (mFirstTimeInit || ((str != NULL) && (mParameters3A.Focus != mode))))
{
@@ -249,20 +251,20 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
CAMHAL_LOGDB("Focus %x", mParameters3A.Focus);
}
- str = params.get(CameraParameters::KEY_EXPOSURE_COMPENSATION);
+ str = params.get(android::CameraParameters::KEY_EXPOSURE_COMPENSATION);
if ( mFirstTimeInit ||
(( str != NULL ) &&
(mParameters3A.EVCompensation !=
- params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION))))
+ params.getInt(android::CameraParameters::KEY_EXPOSURE_COMPENSATION))))
{
CAMHAL_LOGDB("Setting EV Compensation to %d",
- params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION));
+ params.getInt(android::CameraParameters::KEY_EXPOSURE_COMPENSATION));
- mParameters3A.EVCompensation = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
+ mParameters3A.EVCompensation = params.getInt(android::CameraParameters::KEY_EXPOSURE_COMPENSATION);
mPending3Asettings |= SetEVCompensation;
}
- str = params.get(CameraParameters::KEY_FLASH_MODE);
+ str = params.get(android::CameraParameters::KEY_FLASH_MODE);
mode = getLUTvalue_HALtoOMX( str, FlashLUT);
if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.FlashMode != mode )) )
{
@@ -280,7 +282,7 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
CAMHAL_LOGVB("Flash Setting %s", str);
CAMHAL_LOGVB("FlashMode %d", mParameters3A.FlashMode);
- str = params.get(CameraParameters::KEY_EFFECT);
+ str = params.get(android::CameraParameters::KEY_EFFECT);
mode = getLUTvalue_HALtoOMX( str, EffLUT);
if ( mFirstTimeInit || (( str != NULL ) && ( mParameters3A.Effect != mode )) )
{
@@ -292,13 +294,13 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
- str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED);
- if ( (str != NULL) && (!strcmp(str, CameraParameters::TRUE)) )
+ str = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED);
+ if ( (str != NULL) && (!strcmp(str, android::CameraParameters::TRUE)) )
{
OMX_BOOL lock = OMX_FALSE;
mUserSetExpLock = OMX_FALSE;
- str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
- if (str && ((strcmp(str, CameraParameters::TRUE)) == 0))
+ str = params.get(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
+ if (str && ((strcmp(str, android::CameraParameters::TRUE)) == 0))
{
CAMHAL_LOGVA("Locking Exposure");
lock = OMX_TRUE;
@@ -317,13 +319,13 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
}
- str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED);
- if ( (str != NULL) && (!strcmp(str, CameraParameters::TRUE)) )
+ str = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED);
+ if ( (str != NULL) && (!strcmp(str, android::CameraParameters::TRUE)) )
{
OMX_BOOL lock = OMX_FALSE;
mUserSetWbLock = OMX_FALSE;
- str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
- if (str && ((strcmp(str, CameraParameters::TRUE)) == 0))
+ str = params.get(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
+ if (str && ((strcmp(str, android::CameraParameters::TRUE)) == 0))
{
CAMHAL_LOGVA("Locking WhiteBalance");
lock = OMX_TRUE;
@@ -342,24 +344,24 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
}
str = params.get(TICameraParameters::KEY_AUTO_FOCUS_LOCK);
- if (str && (strcmp(str, CameraParameters::TRUE) == 0) && (mParameters3A.FocusLock != OMX_TRUE)) {
+ if (str && (strcmp(str, android::CameraParameters::TRUE) == 0) && (mParameters3A.FocusLock != OMX_TRUE)) {
CAMHAL_LOGVA("Locking Focus");
mParameters3A.FocusLock = OMX_TRUE;
setFocusLock(mParameters3A);
- } else if (str && (strcmp(str, CameraParameters::FALSE) == 0) && (mParameters3A.FocusLock != OMX_FALSE)) {
+ } else if (str && (strcmp(str, android::CameraParameters::FALSE) == 0) && (mParameters3A.FocusLock != OMX_FALSE)) {
CAMHAL_LOGVA("UnLocking Focus");
mParameters3A.FocusLock = OMX_FALSE;
setFocusLock(mParameters3A);
}
- str = params.get(CameraParameters::KEY_METERING_AREAS);
+ str = params.get(android::CameraParameters::KEY_METERING_AREAS);
if ( (str != NULL) ) {
size_t MAX_METERING_AREAS;
- Vector< sp<CameraArea> > tempAreas;
+ android::Vector<android::sp<CameraArea> > tempAreas;
- MAX_METERING_AREAS = atoi(params.get(CameraParameters::KEY_MAX_NUM_METERING_AREAS));
+ MAX_METERING_AREAS = atoi(params.get(android::CameraParameters::KEY_MAX_NUM_METERING_AREAS));
- Mutex::Autolock lock(mMeteringAreasLock);
+ android::AutoMutex lock(mMeteringAreasLock);
ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
@@ -372,7 +374,7 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
if ( MAX_METERING_AREAS >= mMeteringAreas.size() ) {
CAMHAL_LOGDB("Setting Metering Areas %s",
- params.get(CameraParameters::KEY_METERING_AREAS));
+ params.get(android::CameraParameters::KEY_METERING_AREAS));
mPending3Asettings |= SetMeteringAreas;
} else {
@@ -401,14 +403,14 @@ status_t OMXCameraAdapter::setParameters3A(const CameraParameters &params,
return ret;
}
-void OMXCameraAdapter::declareParameter3ABool(const CameraParameters &params, const char *key,
+void OMXCameraAdapter::declareParameter3ABool(const android::CameraParameters &params, const char *key,
OMX_BOOL &current_setting, E3ASettingsFlags pending,
const char *msg)
{
OMX_BOOL val = OMX_TRUE;
const char *str = params.get(key);
- if (str && ((strcmp(str, CameraParameters::FALSE)) == 0))
+ if (str && ((strcmp(str, android::CameraParameters::FALSE)) == 0))
{
CAMHAL_LOGVB("Disabling %s", msg);
val = OMX_FALSE;
@@ -496,7 +498,7 @@ status_t OMXCameraAdapter::setExposureMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
static bool isFlashDisabled() {
@@ -513,7 +515,7 @@ static bool isFlashDisabled() {
char value[PROPERTY_VALUE_MAX];
if (property_get("camera.flash_off", value, NULL) &&
- (!strcasecmp(value, CameraParameters::TRUE) || !strcasecmp(value, "1"))) {
+ (!strcasecmp(value, android::CameraParameters::TRUE) || !strcasecmp(value, "1"))) {
CAMHAL_LOGW("flash is disabled for testing purpose");
return true;
}
@@ -548,7 +550,7 @@ status_t OMXCameraAdapter::setManualExposureVal(Gen3A_settings& Gen3A) {
}
if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("OMX_GetConfig error 0x%x (manual exposure values)", eError);
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
if ( Gen3A.Exposure != OMX_ExposureControlOff ) {
@@ -584,7 +586,7 @@ status_t OMXCameraAdapter::setManualExposureVal(Gen3A_settings& Gen3A) {
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setFlashMode(Gen3A_settings& Gen3A)
@@ -653,7 +655,7 @@ status_t OMXCameraAdapter::setFlashMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getFlashMode(Gen3A_settings& Gen3A)
@@ -685,7 +687,7 @@ status_t OMXCameraAdapter::getFlashMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setFocusMode(Gen3A_settings& Gen3A)
@@ -794,7 +796,7 @@ status_t OMXCameraAdapter::setFocusMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getFocusMode(Gen3A_settings& Gen3A)
@@ -825,7 +827,7 @@ status_t OMXCameraAdapter::getFocusMode(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setScene(Gen3A_settings& Gen3A)
@@ -869,7 +871,7 @@ status_t OMXCameraAdapter::setScene(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setEVCompensation(Gen3A_settings& Gen3A)
@@ -913,7 +915,7 @@ status_t OMXCameraAdapter::setEVCompensation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getEVCompensation(Gen3A_settings& Gen3A)
@@ -944,7 +946,7 @@ status_t OMXCameraAdapter::getEVCompensation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setWBMode(Gen3A_settings& Gen3A)
@@ -1053,7 +1055,7 @@ status_t OMXCameraAdapter::setFlicker(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setBrightness(Gen3A_settings& Gen3A)
@@ -1090,7 +1092,7 @@ status_t OMXCameraAdapter::setBrightness(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setContrast(Gen3A_settings& Gen3A)
@@ -1173,7 +1175,7 @@ status_t OMXCameraAdapter::setSharpness(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getSharpness(Gen3A_settings& Gen3A)
@@ -1204,7 +1206,7 @@ status_t OMXCameraAdapter::getSharpness(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setSaturation(Gen3A_settings& Gen3A)
@@ -1241,7 +1243,7 @@ status_t OMXCameraAdapter::setSaturation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getSaturation(Gen3A_settings& Gen3A)
@@ -1272,7 +1274,7 @@ status_t OMXCameraAdapter::getSaturation(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setISO(Gen3A_settings& Gen3A)
@@ -1311,7 +1313,7 @@ status_t OMXCameraAdapter::setISO(Gen3A_settings& Gen3A)
if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("OMX_GetConfig error 0x%x (manual exposure values)", eError);
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
if( 0 == Gen3A.ISO ) {
@@ -1342,7 +1344,7 @@ status_t OMXCameraAdapter::setISO(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::getISO(Gen3A_settings& Gen3A)
@@ -1373,7 +1375,7 @@ status_t OMXCameraAdapter::getISO(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setEffect(Gen3A_settings& Gen3A)
@@ -1410,7 +1412,7 @@ status_t OMXCameraAdapter::setEffect(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setWhiteBalanceLock(Gen3A_settings& Gen3A)
@@ -1442,7 +1444,7 @@ status_t OMXCameraAdapter::setWhiteBalanceLock(Gen3A_settings& Gen3A)
}
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setExposureLock(Gen3A_settings& Gen3A)
@@ -1474,7 +1476,7 @@ status_t OMXCameraAdapter::setExposureLock(Gen3A_settings& Gen3A)
}
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setFocusLock(Gen3A_settings& Gen3A)
@@ -1505,7 +1507,7 @@ status_t OMXCameraAdapter::setFocusLock(Gen3A_settings& Gen3A)
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_BOOL toggleFocus)
@@ -1539,7 +1541,6 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
}
else
{
- const char *lock_state_exp = toggleExp ? CameraParameters::TRUE : CameraParameters::FALSE;
CAMHAL_LOGDA("Exposure Lock GetConfig successfull");
/* Apply locks only when not applied already */
@@ -1585,7 +1586,6 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
}
else
{
- const char *lock_state_wb = toggleWb ? CameraParameters::TRUE : CameraParameters::FALSE;
CAMHAL_LOGDA("WhiteBalance Lock GetConfig successfull");
/* Apply locks only when not applied already */
@@ -1596,7 +1596,7 @@ status_t OMXCameraAdapter::set3ALock(OMX_BOOL toggleExp, OMX_BOOL toggleWb, OMX_
}
EXIT:
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
@@ -1618,7 +1618,7 @@ status_t OMXCameraAdapter::setMeteringAreas(Gen3A_settings& Gen3A)
return ret;
}
- Mutex::Autolock lock(mMeteringAreasLock);
+ android::AutoMutex lock(mMeteringAreasLock);
if ( OMX_StateInvalid == mComponentState )
{
@@ -1769,7 +1769,7 @@ status_t OMXCameraAdapter::setParameter3ABool(const OMX_INDEXTYPE omx_idx,
LOG_FUNCTION_NAME_EXIT
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setAlgoFixedGamma(Gen3A_settings& Gen3A)
@@ -1810,7 +1810,7 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(m3ASettingsUpdateLock);
+ android::AutoMutex lock(m3ASettingsUpdateLock);
/*
* Scenes have a priority during the process
@@ -1987,4 +1987,5 @@ status_t OMXCameraAdapter::apply3Asettings( Gen3A_settings& Gen3A )
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
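Nearly every setter in this file now ends with the same translation step: the raw OMX_ERRORTYPE produced by OMX_GetConfig/OMX_SetConfig is mapped to an Android status_t through the helper that moved into Ti::Utils. A minimal sketch of that return shape, with the helper's signature inferred from how these hunks call it and an illustrative setter name:

    // Inferred from usage above; declared in this tree's ErrorUtils.h roughly as
    //   android::status_t ErrorUtils::omxToAndroidError(OMX_ERRORTYPE error);
    status_t OMXCameraAdapter::setSomething3A(Gen3A_settings &Gen3A)   // illustrative name
    {
        OMX_ERRORTYPE eError = OMX_ErrorNone;

        // ... OMX_INIT_STRUCT_PTR on the config struct, fill it from Gen3A,
        //     then OMX_SetConfig(mCameraAdapterParameters.mHandleComp, ...) -> eError ...

        // Single exit point: OMX error codes never leak past the adapter boundary.
        return Utils::ErrorUtils::omxToAndroidError(eError);
    }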
diff --git a/camera/OMXCameraAdapter/OMXAlgo.cpp b/camera/OMXCameraAdapter/OMXAlgo.cpp
index d840079..e96a472 100644
--- a/camera/OMXCameraAdapter/OMXAlgo.cpp
+++ b/camera/OMXCameraAdapter/OMXAlgo.cpp
@@ -27,9 +27,10 @@
#undef TRUE
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersAlgo(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -125,7 +126,7 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
CAMHAL_LOGVB("IPP Mode set %d", ipp);
if (((valstr = params.get(TICameraParameters::KEY_GBCE)) != NULL) ) {
- if (strcmp(valstr, CameraParameters::TRUE ) == 0) {
+ if (strcmp(valstr, android::CameraParameters::TRUE ) == 0) {
gbce = BRIGHTNESS_ON;
} else {
gbce = BRIGHTNESS_OFF;
@@ -143,7 +144,7 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
if ( ( valstr = params.get(TICameraParameters::KEY_GLBCE) ) != NULL ) {
- if (strcmp(valstr, CameraParameters::TRUE) == 0) {
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0) {
glbce = BRIGHTNESS_ON;
} else {
glbce = BRIGHTNESS_OFF;
@@ -174,7 +175,7 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
///Set VNF Configuration
bool vnfEnabled = false;
valstr = params.get(TICameraParameters::KEY_VNF);
- if (valstr && strcmp(valstr, CameraParameters::TRUE) == 0)
+ if (valstr && strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
CAMHAL_LOGDA("VNF Enabled");
vnfEnabled = true;
@@ -194,8 +195,8 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
///Set VSTAB Configuration
bool vstabEnabled = false;
- valstr = params.get(CameraParameters::KEY_VIDEO_STABILIZATION);
- if (valstr && strcmp(valstr, CameraParameters::TRUE) == 0) {
+ valstr = params.get(android::CameraParameters::KEY_VIDEO_STABILIZATION);
+ if (valstr && strcmp(valstr, android::CameraParameters::TRUE) == 0) {
CAMHAL_LOGDA("VSTAB Enabled");
vstabEnabled = true;
}
@@ -239,7 +240,7 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
//Set Mechanical Misalignment Correction
valstr = params.get(TICameraParameters::KEY_MECHANICAL_MISALIGNMENT_CORRECTION);
if ( valstr != NULL ) {
- setMechanicalMisalignmentCorrection(strcmp(valstr, CameraParameters::TRUE) == 0);
+ setMechanicalMisalignmentCorrection(strcmp(valstr, android::CameraParameters::TRUE) == 0);
CAMHAL_LOGDB("Mechanical Misalignment Correction %s", valstr);
}
}
@@ -250,13 +251,13 @@ status_t OMXCameraAdapter::setParametersAlgo(const CameraParameters &params,
}
// Set AutoConvergence
-status_t OMXCameraAdapter::setAutoConvergence(const char *pValstr, const char *pValManualstr, const CameraParameters &params)
+status_t OMXCameraAdapter::setAutoConvergence(const char *pValstr, const char *pValManualstr, const android::CameraParameters &params)
{
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMX_TI_CONFIG_CONVERGENCETYPE ACParams;
const char *str = NULL;
- Vector< sp<CameraArea> > tempAreas;
+ android::Vector<android::sp<CameraArea> > tempAreas;
int mode;
int changed = 0;
@@ -287,15 +288,15 @@ status_t OMXCameraAdapter::setAutoConvergence(const char *pValstr, const char *p
}
if ( OMX_TI_AutoConvergenceModeFocusFaceTouch == mAutoConv ) {
- Mutex::Autolock lock(mTouchAreasLock);
+ android::AutoMutex lock(mTouchAreasLock);
- str = params.get((const char *)CameraParameters::KEY_METERING_AREAS);
+ str = params.get(android::CameraParameters::KEY_METERING_AREAS);
if ( NULL != str ) {
ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
} else {
CAMHAL_LOGEB("Touch areas not received in %s",
- CameraParameters::KEY_METERING_AREAS);
+ android::CameraParameters::KEY_METERING_AREAS);
LOG_FUNCTION_NAME_EXIT;
return BAD_VALUE;
}
@@ -688,7 +689,7 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
if ( OMX_ErrorNone != eError )
{
CAMHAL_LOGEB("Error while configuring camera mode 0x%x", eError);
- ret = ErrorUtils::omxToAndroidError(eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
}
else
{
@@ -703,7 +704,7 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
&singlePrevMode);
if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while configuring single preview mode 0x%x", eError);
- ret = ErrorUtils::omxToAndroidError(eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
} else {
CAMHAL_LOGDA("single preview mode configured successfully");
}
@@ -719,7 +720,7 @@ status_t OMXCameraAdapter::setCaptureMode(OMXCameraAdapter::CaptureMode mode)
if ( OMX_ErrorNone != eError )
{
CAMHAL_LOGEB("Error while configuring CAC 0x%x", eError);
- ret = ErrorUtils::omxToAndroidError(eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
}
else
{
@@ -1027,7 +1028,7 @@ status_t OMXCameraAdapter::setAlgoPriority(AlgoPriority priority,
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::setPictureRotation(unsigned int degree)
@@ -1217,4 +1218,5 @@ status_t OMXCameraAdapter::setMechanicalMisalignmentCorrection(const bool enable
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
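The touch/metering-area handling above keeps its logic and only gains explicit android:: qualification on the strong-pointer container and the lock. A minimal sketch of the parsing step as these hunks use it (locking member taken from the hunk, error handling shortened):

    android::Vector< android::sp<CameraArea> > tempAreas;
    const char *str = params.get(android::CameraParameters::KEY_METERING_AREAS);

    if (str != NULL) {
        android::AutoMutex lock(mTouchAreasLock);
        // parseAreas() fills tempAreas with reference-counted CameraArea objects
        // parsed from the "(left,top,right,bottom,weight),..." area string.
        status_t ret = CameraArea::parseAreas(str, strlen(str) + 1, tempAreas);
        if (ret != NO_ERROR) {
            // malformed area string: keep the previous configuration untouched
        }
    }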
diff --git a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
index 575b33b..c504afb 100644
--- a/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
+++ b/camera/OMXCameraAdapter/OMXCameraAdapter.cpp
@@ -35,7 +35,8 @@ static int mDebugFcs = 0;
#define HERE(Msg) {CAMHAL_LOGEB("--===line %d, %s===--\n", __LINE__, Msg);}
-namespace android {
+namespace Ti {
+namespace Camera {
#ifdef CAMERAHAL_OMX_PROFILING
@@ -46,7 +47,7 @@ const char OMXCameraAdapter::DEFAULT_PROFILE_PATH[] = "/data/dbg/profile_data.bi
//frames skipped before recalculating the framerate
#define FPS_PERIOD 30
-Mutex gAdapterLock;
+android::Mutex gAdapterLock;
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
@@ -100,15 +101,15 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
eError = OMX_Init();
if (eError != OMX_ErrorNone) {
CAMHAL_LOGEB("OMX_Init() failed, error: 0x%x", eError);
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
mOmxInitialized = true;
// Initialize the callback handles
OMX_CALLBACKTYPE callbacks;
- callbacks.EventHandler = android::OMXCameraAdapterEventHandler;
- callbacks.EmptyBufferDone = android::OMXCameraAdapterEmptyBufferDone;
- callbacks.FillBufferDone = android::OMXCameraAdapterFillBufferDone;
+ callbacks.EventHandler = Camera::OMXCameraAdapterEventHandler;
+ callbacks.EmptyBufferDone = Camera::OMXCameraAdapterEmptyBufferDone;
+ callbacks.FillBufferDone = Camera::OMXCameraAdapterFillBufferDone;
///Get the handle to the OMX Component
eError = OMXCameraAdapter::OMXCameraGetHandle(&mCameraAdapterParameters.mHandleComp, this, callbacks);
@@ -275,7 +276,7 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
return NO_MEMORY;
}
- ret = mCommandHandler->run("CallbackThread", PRIORITY_URGENT_DISPLAY);
+ ret = mCommandHandler->run("CallbackThread", android::PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
if( ret == INVALID_OPERATION){
@@ -297,7 +298,7 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
return NO_MEMORY;
}
- ret = mOMXCallbackHandler->run("OMXCallbackThread", PRIORITY_URGENT_DISPLAY);
+ ret = mOMXCallbackHandler->run("OMXCallbackThread", android::PRIORITY_URGENT_DISPLAY);
if ( ret != NO_ERROR )
{
if( ret == INVALID_OPERATION){
@@ -365,14 +366,14 @@ status_t OMXCameraAdapter::initialize(CameraProperties::Properties* caps)
mParameters3A.AlgoGIC = OMX_TRUE;
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
EXIT:
CAMHAL_LOGDB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
void OMXCameraAdapter::performCleanupAfterError()
@@ -469,7 +470,7 @@ status_t OMXCameraAdapter::fillThisBuffer(CameraBuffer * frameBuf, CameraFrame::
for ( int i = 0 ; i < port->mNumBufs ; i++) {
if ((CameraBuffer *) port->mBufferHeader[i]->pAppPrivate == frameBuf) {
if ( isCaptureFrame && !mBracketingEnabled ) {
- Mutex::Autolock lock(mBurstLock);
+ android::AutoMutex lock(mBurstLock);
if (mBurstFramesQueued >= mBurstFramesAccum) {
port->mStatus[i] = OMXCameraPortParameters::IDLE;
return NO_ERROR;
@@ -498,7 +499,7 @@ EXIT:
//Since fillthisbuffer is called asynchronously, make sure to signal error to the app
mErrorNotifier->errorNotify(CAMERA_ERROR_HARD);
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
void OMXCameraAdapter::setParamS3D(OMX_U32 port, const char *valstr)
@@ -539,7 +540,7 @@ void OMXCameraAdapter::setParamS3D(OMX_U32 port, const char *valstr)
LOG_FUNCTION_NAME_EXIT;
}
-status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
+status_t OMXCameraAdapter::setParameters(const android::CameraParameters &params)
{
LOG_FUNCTION_NAME;
@@ -555,12 +556,12 @@ status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
///@todo Include more camera parameters
if ( (valstr = params.getPreviewFormat()) != NULL ) {
- if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
- strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420P) == 0 ||
- strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0 ||
+ strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420P) == 0 ||
+ strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
CAMHAL_LOGDA("YUV420SP format selected");
pixFormat = OMX_COLOR_FormatYUV420PackedSemiPlanar;
- } else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
CAMHAL_LOGDA("RGB565 format selected");
pixFormat = OMX_COLOR_Format16bitRGB565;
} else {
@@ -645,11 +646,11 @@ status_t OMXCameraAdapter::setParameters(const CameraParameters &params)
if ( (valstr = params.get(TICameraParameters::KEY_MEASUREMENT_ENABLE)) != NULL )
{
- if (strcmp(valstr, CameraParameters::TRUE) == 0)
+ if (strcmp(valstr, android::CameraParameters::TRUE) == 0)
{
mMeasurementEnabled = true;
}
- else if (strcmp(valstr, CameraParameters::FALSE) == 0)
+ else if (strcmp(valstr, android::CameraParameters::FALSE) == 0)
{
mMeasurementEnabled = false;
}
@@ -761,7 +762,7 @@ static status_t saveBufferToFile(const void *buf, int size, const char *filename
#endif
-void OMXCameraAdapter::getParameters(CameraParameters& params)
+void OMXCameraAdapter::getParameters(android::CameraParameters& params)
{
status_t ret = NO_ERROR;
OMX_CONFIG_EXPOSUREVALUETYPE exp;
@@ -786,36 +787,36 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
}
valstr = getLUTvalue_OMXtoHAL(mParameters3A.WhiteBallance, WBalLUT);
- valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_WHITE_BALANCE);
if (valstr && valstr_supported && strstr(valstr_supported, valstr))
- params.set(CameraParameters::KEY_WHITE_BALANCE , valstr);
+ params.set(android::CameraParameters::KEY_WHITE_BALANCE , valstr);
valstr = getLUTvalue_OMXtoHAL(mParameters3A.FlashMode, FlashLUT);
- valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_FLASH_MODES);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_FLASH_MODES);
if (valstr && valstr_supported && strstr(valstr_supported, valstr))
- params.set(CameraParameters::KEY_FLASH_MODE, valstr);
+ params.set(android::CameraParameters::KEY_FLASH_MODE, valstr);
if ((mParameters3A.Focus == OMX_IMAGE_FocusControlAuto) &&
(mCapMode != OMXCameraAdapter::VIDEO_MODE)) {
- valstr = CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
+ valstr = android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE;
} else {
valstr = getLUTvalue_OMXtoHAL(mParameters3A.Focus, FocusLUT);
}
- valstr_supported = mParams.get(CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
+ valstr_supported = mParams.get(android::CameraParameters::KEY_SUPPORTED_FOCUS_MODES);
if (valstr && valstr_supported && strstr(valstr_supported, valstr))
- params.set(CameraParameters::KEY_FOCUS_MODE, valstr);
+ params.set(android::CameraParameters::KEY_FOCUS_MODE, valstr);
}
//Query focus distances only when focus is running
if ( ( AF_ACTIVE & state ) ||
- ( NULL == mParameters.get(CameraParameters::KEY_FOCUS_DISTANCES) ) )
+ ( NULL == mParameters.get(android::CameraParameters::KEY_FOCUS_DISTANCES) ) )
{
updateFocusDistances(params);
}
else
{
- params.set(CameraParameters::KEY_FOCUS_DISTANCES,
- mParameters.get(CameraParameters::KEY_FOCUS_DISTANCES));
+ params.set(android::CameraParameters::KEY_FOCUS_DISTANCES,
+ mParameters.get(android::CameraParameters::KEY_FOCUS_DISTANCES));
}
OMX_INIT_STRUCT_PTR (&exp, OMX_CONFIG_EXPOSUREVALUETYPE);
@@ -834,7 +835,7 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
}
{
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
//Immediate zoom should not be avaialable while smooth zoom is running
if ( ZOOM_ACTIVE & state )
{
@@ -842,7 +843,7 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
{
mZoomParameterIdx += mZoomInc;
}
- params.set( CameraParameters::KEY_ZOOM, mZoomParameterIdx);
+ params.set(android::CameraParameters::KEY_ZOOM, mZoomParameterIdx);
if ( ( mCurrentZoomIdx == mTargetZoomIdx ) &&
( mZoomParameterIdx == mCurrentZoomIdx ) )
{
@@ -869,39 +870,39 @@ void OMXCameraAdapter::getParameters(CameraParameters& params)
}
else
{
- params.set( CameraParameters::KEY_ZOOM, mCurrentZoomIdx);
+ params.set(android::CameraParameters::KEY_ZOOM, mCurrentZoomIdx);
}
}
//Populate current lock status
if ( mUserSetExpLock || mParameters3A.ExposureLock ) {
- params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
- CameraParameters::TRUE);
+ params.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
+ android::CameraParameters::TRUE);
} else {
- params.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
- CameraParameters::FALSE);
+ params.set(android::CameraParameters::KEY_AUTO_EXPOSURE_LOCK,
+ android::CameraParameters::FALSE);
}
if ( mUserSetWbLock || mParameters3A.WhiteBalanceLock ) {
- params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
- CameraParameters::TRUE);
+ params.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
+ android::CameraParameters::TRUE);
} else {
- params.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
- CameraParameters::FALSE);
+ params.set(android::CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK,
+ android::CameraParameters::FALSE);
}
// Update Picture size capabilities dynamically
- params.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+ params.set(android::CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
mCapabilities->get(CameraProperties::SUPPORTED_PICTURE_SIZES));
// Update framerate capabilities dynamically
- params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+ params.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
mCapabilities->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES));
params.set(TICameraParameters::KEY_FRAMERATES_EXT_SUPPORTED,
mCapabilities->get(CameraProperties::SUPPORTED_PREVIEW_FRAME_RATES_EXT));
- params.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+ params.set(android::CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
mCapabilities->get(CameraProperties::FRAMERATE_RANGE_SUPPORTED));
params.set(TICameraParameters::KEY_FRAMERATE_RANGES_EXT_SUPPORTED,
@@ -1038,7 +1039,7 @@ status_t OMXCameraAdapter::setSensorQuirks(int orientation,
if ( eError != OMX_ErrorNone ) {
CAMHAL_LOGEB("OMX_GetParameter - %x", eError);
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
if ( ( orientation == 90 ) || ( orientation == 270 ) ) {
@@ -1240,7 +1241,7 @@ status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &port
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
EXIT:
@@ -1248,7 +1249,7 @@ status_t OMXCameraAdapter::setFormat(OMX_U32 port, OMXCameraPortParameters &port
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::flushBuffers(OMX_U32 nPort)
@@ -1325,13 +1326,13 @@ status_t OMXCameraAdapter::flushBuffers(OMX_U32 nPort)
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
///API to give the buffers to Adapter
@@ -1385,7 +1386,7 @@ status_t OMXCameraAdapter::UseBuffersPreviewData(CameraBuffer * bufArr, int num)
status_t ret = NO_ERROR;
OMX_ERRORTYPE eError = OMX_ErrorNone;
OMXCameraPortParameters * measurementData = NULL;
- Mutex::Autolock lock( mPreviewDataBufferLock);
+ android::AutoMutex lock(mPreviewDataBufferLock);
LOG_FUNCTION_NAME;
@@ -1489,13 +1490,13 @@ EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::switchToExecuting()
{
status_t ret = NO_ERROR;
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
@@ -1606,7 +1607,7 @@ status_t OMXCameraAdapter::doSwitchToExecuting()
performCleanupAfterError();
mStateSwitchLock.unlock();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::switchToIdle() {
@@ -1615,7 +1616,7 @@ status_t OMXCameraAdapter::switchToIdle() {
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mIdleStateSwitchLock);
+ android::AutoMutex lock(mIdleStateSwitchLock);
if ( mComponentState == OMX_StateIdle || mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid) {
CAMHAL_LOGDA("Already in OMX_StateIdle, OMX_Loaded state or OMX_StateInvalid state");
@@ -1690,7 +1691,7 @@ EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
@@ -1752,13 +1753,13 @@ status_t OMXCameraAdapter::prevPortEnable() {
}
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::switchToLoaded(bool bPortEnableRequired) {
@@ -1767,7 +1768,7 @@ status_t OMXCameraAdapter::switchToLoaded(bool bPortEnableRequired) {
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mStateSwitchLock);
+ android::AutoMutex lock(mStateSwitchLock);
if ( mComponentState == OMX_StateLoaded || mComponentState == OMX_StateInvalid) {
CAMHAL_LOGDA("Already in OMX_Loaded state or OMX_StateInvalid state");
return NO_ERROR;
@@ -1839,13 +1840,13 @@ status_t OMXCameraAdapter::switchToLoaded(bool bPortEnableRequired) {
prevPortEnable();
}
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::UseBuffersPreview(CameraBuffer * bufArr, int num)
@@ -2105,7 +2106,7 @@ status_t OMXCameraAdapter::UseBuffersPreview(CameraBuffer * bufArr, int num)
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
///If there is any failure, we reach here.
///Here, we do any resource freeing and convert from OMX error code to Camera Hal error code
@@ -2118,7 +2119,7 @@ EXIT:
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::startPreview()
@@ -2261,7 +2262,7 @@ status_t OMXCameraAdapter::startPreview()
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
@@ -2270,7 +2271,7 @@ status_t OMXCameraAdapter::startPreview()
mStateSwitchLock.unlock();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
@@ -2296,7 +2297,7 @@ status_t OMXCameraAdapter::destroyTunnel()
}
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
// we should wait for the first frame to come before trying to stopPreview...if not
// we might put OMXCamera in a bad state (IDLE->LOADED timeout). Seeing this a lot
// after a capture
@@ -2316,7 +2317,7 @@ status_t OMXCameraAdapter::destroyTunnel()
}
{
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
@@ -2364,18 +2365,18 @@ status_t OMXCameraAdapter::destroyTunnel()
mTunnelDestroyed = true;
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
{
- Mutex::Autolock lock(mPreviewBufferLock);
+ android::AutoMutex lock(mPreviewBufferLock);
///Clear all the available preview buffers
mPreviewBuffersAvailable.clear();
}
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
@@ -2426,14 +2427,14 @@ status_t OMXCameraAdapter::stopPreview() {
}
{
- Mutex::Autolock lock(mPreviewDataBufferLock);
+ android::AutoMutex lock(mPreviewDataBufferLock);
mPreviewDataBuffersAvailable.clear();
}
}
{
- Mutex::Autolock lock(mPreviewBufferLock);
+ android::AutoMutex lock(mPreviewBufferLock);
///Clear all the available preview buffers
mPreviewBuffersAvailable.clear();
}
@@ -2449,18 +2450,18 @@ status_t OMXCameraAdapter::stopPreview() {
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
{
- Mutex::Autolock lock(mPreviewBufferLock);
+ android::AutoMutex lock(mPreviewBufferLock);
///Clear all the available preview buffers
mPreviewBuffersAvailable.clear();
}
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
@@ -2505,7 +2506,7 @@ status_t OMXCameraAdapter::setSensorOverclock(bool enable)
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::printComponentVersion(OMX_HANDLETYPE handle)
@@ -2658,12 +2659,12 @@ status_t OMXCameraAdapter::setS3DFrameLayout(OMX_U32 port) const
status_t OMXCameraAdapter::autoFocus()
{
status_t ret = NO_ERROR;
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
if (mFrameCount < 1) {
// first frame may time some time to come...so wait for an adequate amount of time
// which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover.
@@ -2689,12 +2690,12 @@ status_t OMXCameraAdapter::autoFocus()
status_t OMXCameraAdapter::takePicture()
{
status_t ret = NO_ERROR;
- TIUTILS::Message msg;
+ Utils::Message msg;
LOG_FUNCTION_NAME;
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
if (mFrameCount < 1) {
// first frame may time some time to come...so wait for an adequate amount of time
// which 2 * OMX_CAPTURE_TIMEOUT * 1000 will cover.
@@ -3031,10 +3032,10 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterEventHandler(OMX_IN OMX_HANDLETY
{
CAMHAL_LOGEB("***Removing %d EVENTS***** \n", mEventSignalQ.size());
//remove from queue and free msg
- TIUTILS::Message *msg = mEventSignalQ.itemAt(i);
+ Utils::Message *msg = mEventSignalQ.itemAt(i);
if ( NULL != msg )
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
if ( sem )
{
sem->Signal();
@@ -3093,8 +3094,8 @@ OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_U32 nData2,
OMX_IN OMX_PTR pEventData)
{
- Mutex::Autolock lock(mEventLock);
- TIUTILS::Message *msg;
+ android::AutoMutex lock(mEventLock);
+ Utils::Message *msg;
bool eventSignalled = false;
LOG_FUNCTION_NAME;
@@ -3113,7 +3114,7 @@ OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
&& ( !msg->arg2 || ( OMX_U32 ) msg->arg2 == nData2 )
&& msg->arg3)
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
CAMHAL_LOGDA("Event matched, signalling sem");
mEventSignalQ.removeAt(i);
//Signal the semaphore provided
@@ -3134,7 +3135,7 @@ OMX_ERRORTYPE OMXCameraAdapter::SignalEvent(OMX_IN OMX_HANDLETYPE hComponent,
// Handling for focus callback
if ((nData2 == OMX_IndexConfigCommonFocusStatus) &&
(eEvent == (OMX_EVENTTYPE) OMX_EventIndexSettingChanged)) {
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = OMXCallbackHandler::CAMERA_FOCUS_STATUS;
msg.arg1 = NULL;
msg.arg2 = NULL;
@@ -3153,8 +3154,8 @@ OMX_ERRORTYPE OMXCameraAdapter::RemoveEvent(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_U32 nData2,
OMX_IN OMX_PTR pEventData)
{
- Mutex::Autolock lock(mEventLock);
- TIUTILS::Message *msg;
+ android::AutoMutex lock(mEventLock);
+ Utils::Message *msg;
LOG_FUNCTION_NAME;
if ( !mEventSignalQ.isEmpty() )
@@ -3171,7 +3172,7 @@ OMX_ERRORTYPE OMXCameraAdapter::RemoveEvent(OMX_IN OMX_HANDLETYPE hComponent,
&& ( !msg->arg2 || ( OMX_U32 ) msg->arg2 == nData2 )
&& msg->arg3)
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
CAMHAL_LOGDA("Event matched, signalling sem");
mEventSignalQ.removeAt(i);
free(msg);
@@ -3194,14 +3195,14 @@ status_t OMXCameraAdapter::RegisterForEvent(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_EVENTTYPE eEvent,
OMX_IN OMX_U32 nData1,
OMX_IN OMX_U32 nData2,
- OMX_IN Semaphore &semaphore)
+ OMX_IN Utils::Semaphore &semaphore)
{
status_t ret = NO_ERROR;
ssize_t res;
- Mutex::Autolock lock(mEventLock);
+ android::AutoMutex lock(mEventLock);
LOG_FUNCTION_NAME;
- TIUTILS::Message * msg = ( struct TIUTILS::Message * ) malloc(sizeof(struct TIUTILS::Message));
+ Utils::Message * msg = ( struct Utils::Message * ) malloc(sizeof(struct Utils::Message));
if ( NULL != msg )
{
msg->command = ( unsigned int ) eEvent;
@@ -3306,7 +3307,7 @@ OMX_ERRORTYPE OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLETYPE hComponent,
OMX_IN OMX_PTR pAppData,
OMX_IN OMX_BUFFERHEADERTYPE* pBuffHeader)
{
- TIUTILS::Message msg;
+ Utils::Message msg;
OMX_ERRORTYPE eError = OMX_ErrorNone;
if (UNLIKELY(mDebugFps)) {
@@ -3383,7 +3384,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
BaseCameraAdapter::AdapterState state, nextState;
BaseCameraAdapter::getState(state);
BaseCameraAdapter::getNextState(nextState);
- sp<CameraMetadataResult> metadataResult = NULL;
+ android::sp<CameraMetadataResult> metadataResult = NULL;
unsigned int mask = 0xFFFF;
CameraFrame cameraFrame;
OMX_OTHER_EXTRADATATYPE *extraData;
@@ -3512,7 +3513,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
}
{
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
if ( mFDSwitchAlgoPriority ) {
//Disable region priority and enable face priority for AF
@@ -3561,7 +3562,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
} else if ( pixFormat == OMX_COLOR_FormatCbYCrY &&
((mPictureFormatFromClient &&
!strcmp(mPictureFormatFromClient,
- CameraParameters::PIXEL_FORMAT_JPEG)) ||
+ android::CameraParameters::PIXEL_FORMAT_JPEG)) ||
!mPictureFormatFromClient) ) {
// signals to callbacks that this needs to be coverted to jpeg
// before returning to framework
@@ -3595,7 +3596,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
}
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( mBracketingEnabled )
{
doBracketing(pBuffHeader, typeOfFrame);
@@ -3664,7 +3665,7 @@ OMX_ERRORTYPE OMXCameraAdapter::OMXCameraAdapterFillBufferDone(OMX_IN OMX_HANDLE
typeOfFrame = CameraFrame::RAW_FRAME;
pPortParam->mImageType = typeOfFrame;
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
if( ( CAPTURE_ACTIVE & state ) != CAPTURE_ACTIVE ) {
goto EXIT;
}
@@ -3744,7 +3745,7 @@ status_t OMXCameraAdapter::recalculateFPS()
float currentFPS;
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
mFrameCount++;
if (mFrameCount == 1) {
mFirstFrameCondition.broadcast();
@@ -3794,7 +3795,7 @@ status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEA
return -EINVAL;
}
- Mutex::Autolock lock(mSubscriberLock);
+ android::AutoMutex lock(mSubscriberLock);
//frame.mFrameType = typeOfFrame;
frame.mFrameMask = mask;
@@ -3832,7 +3833,7 @@ status_t OMXCameraAdapter::sendCallBacks(CameraFrame frame, OMX_IN OMX_BUFFERHEA
bool OMXCameraAdapter::CommandHandler::Handler()
{
- TIUTILS::Message msg;
+ Utils::Message msg;
volatile int forever = 1;
status_t stat;
ErrorNotifier *errorNotify = NULL;
@@ -3843,9 +3844,9 @@ bool OMXCameraAdapter::CommandHandler::Handler()
{
stat = NO_ERROR;
CAMHAL_LOGDA("Handler: waiting for messsage...");
- TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
+ Utils::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.get(&msg);
}
CAMHAL_LOGDB("msg.command = %d", msg.command);
@@ -3894,16 +3895,16 @@ bool OMXCameraAdapter::CommandHandler::Handler()
bool OMXCameraAdapter::OMXCallbackHandler::Handler()
{
- TIUTILS::Message msg;
+ Utils::Message msg;
volatile int forever = 1;
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
while(forever){
- TIUTILS::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
+ Utils::MessageQueue::waitForMsg(&mCommandMsgQ, NULL, NULL, -1);
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.get(&msg);
mIsProcessed = false;
}
@@ -3955,7 +3956,7 @@ void OMXCameraAdapter::OMXCallbackHandler::flush()
{
LOG_FUNCTION_NAME;
- AutoMutex locker(mLock);
+ android::AutoMutex locker(mLock);
CAMHAL_UNUSED(locker);
if ( mIsProcessed )
@@ -3993,7 +3994,7 @@ status_t OMXCameraAdapter::setExtraData(bool enable, OMX_U32 nPortIndex, OMX_EXT
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
OMX_OTHER_EXTRADATATYPE *OMXCameraAdapter::getExtradata(const OMX_PTR ptrPrivate, OMX_EXTRADATATYPE type) const
@@ -4114,7 +4115,7 @@ OMXCameraAdapter::~OMXCameraAdapter()
{
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(gAdapterLock);
+ android::AutoMutex lock(gAdapterLock);
if ( mOmxInitialized ) {
// return to OMX Loaded state
@@ -4141,11 +4142,11 @@ OMXCameraAdapter::~OMXCameraAdapter()
{
for (unsigned int i = 0 ; i < mEventSignalQ.size() ; i++ )
{
- TIUTILS::Message *msg = mEventSignalQ.itemAt(i);
+ Utils::Message *msg = mEventSignalQ.itemAt(i);
//remove from queue and free msg
if ( NULL != msg )
{
- Semaphore *sem = (Semaphore*) msg->arg3;
+ Utils::Semaphore *sem = (Utils::Semaphore*) msg->arg3;
sem->Signal();
free(msg);
@@ -4157,7 +4158,7 @@ OMXCameraAdapter::~OMXCameraAdapter()
//Exit and free ref to command handling thread
if ( NULL != mCommandHandler.get() )
{
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = CommandHandler::COMMAND_EXIT;
msg.arg1 = mErrorNotifier;
mCommandHandler->clearCommandQ();
@@ -4169,7 +4170,7 @@ OMXCameraAdapter::~OMXCameraAdapter()
//Exit and free ref to callback handling thread
if ( NULL != mOMXCallbackHandler.get() )
{
- TIUTILS::Message msg;
+ Utils::Message msg;
msg.command = OMXCallbackHandler::COMMAND_EXIT;
//Clear all messages pending first
mOMXCallbackHandler->clearCommandQ();
@@ -4184,7 +4185,7 @@ OMXCameraAdapter::~OMXCameraAdapter()
extern "C" CameraAdapter* OMXCameraAdapter_Factory(size_t sensor_index)
{
CameraAdapter *adapter = NULL;
- Mutex::Autolock lock(gAdapterLock);
+ android::AutoMutex lock(gAdapterLock);
LOG_FUNCTION_NAME;
@@ -4264,7 +4265,7 @@ public:
if ( sendCommandError != OMX_ErrorNone )
{
CAMHAL_LOGE("Failed disabling all ports, error: 0x%x", sendCommandError);
- return ErrorUtils::omxToAndroidError(sendCommandError);
+ return Utils::ErrorUtils::omxToAndroidError(sendCommandError);
}
CAMHAL_LOGD("Waiting for disabling all ports will be finished...");
@@ -4308,7 +4309,7 @@ public:
if ( switchError != OMX_ErrorNone )
{
CAMHAL_LOGE("Failed switching to state 0x%x, error: 0x%x", mState, switchError);
- return ErrorUtils::omxToAndroidError(switchError);
+ return Utils::ErrorUtils::omxToAndroidError(switchError);
}
// wait for the event for 3 seconds
@@ -4568,7 +4569,7 @@ extern "C" status_t OMXCameraAdapter_Capabilities(
int num_cameras_supported = 0;
OMX_ERRORTYPE eError = OMX_ErrorNone;
- Mutex::Autolock lock(gAdapterLock);
+ android::AutoMutex lock(gAdapterLock);
if (!properties_array) {
CAMHAL_LOGEB("invalid param: properties = 0x%p", properties_array);
@@ -4579,7 +4580,7 @@ extern "C" status_t OMXCameraAdapter_Capabilities(
eError = OMX_Init();
if (eError != OMX_ErrorNone) {
CAMHAL_LOGEB("Error OMX_Init -0x%x", eError);
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
// Continue selecting sensor and then querying OMX Camera for it's capabilities
@@ -4628,7 +4629,7 @@ extern "C" status_t OMXCameraAdapter_Capabilities(
{
CAMHAL_LOGE("Error: 0x%x", eError);
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
supportedCameras = num_cameras_supported;
@@ -4638,7 +4639,8 @@ extern "C" status_t OMXCameraAdapter_Capabilities(
return NO_ERROR;
}
-};
+} // namespace Camera
+} // namespace Ti
/*--------------------Camera Adapter Class ENDS here-----------------------------*/
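
The destructor hunks above switch the event-signal drain to the Utils-qualified types. A condensed sketch of that drain loop follows, assuming (as the itemAt()/size() usage in the patch suggests) that the queue is an android::Vector of Utils::Message pointers and that each queued message carries a Utils::Semaphore pointer in arg3; the free-standing helper is illustrative only, not a real member of the adapter.

#include <utils/Vector.h>
#include "MessageQueue.h"
#include "Semaphore.h"

namespace Ti {
namespace Camera {

// Sketch only: mirrors the ~OMXCameraAdapter() hunks above.
static void drainEventSignalQueue(android::Vector<Utils::Message *> &eventSignalQ)
{
    for (unsigned int i = 0; i < eventSignalQ.size(); i++) {
        Utils::Message *msg = eventSignalQ.itemAt(i);
        if (msg != NULL) {
            // Wake any caller still blocked on this event before freeing the message.
            Utils::Semaphore *sem = (Utils::Semaphore *) msg->arg3;
            sem->Signal();
            free(msg);
        }
    }
    eventSignalQ.clear();
}

} // namespace Camera
} // namespace Ti
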
diff --git a/camera/OMXCameraAdapter/OMXCapabilities.cpp b/camera/OMXCameraAdapter/OMXCapabilities.cpp
index a87c8b5..bd734a1 100644
--- a/camera/OMXCameraAdapter/OMXCapabilities.cpp
+++ b/camera/OMXCameraAdapter/OMXCapabilities.cpp
@@ -26,7 +26,8 @@
#include "ErrorUtils.h"
#include "TICameraParameters.h"
-namespace android {
+namespace Ti {
+namespace Camera {
/************************************
* global constants and variables
@@ -187,12 +188,12 @@ const CapResolution OMXCameraAdapter::mThumbRes [] = {
};
const CapPixelformat OMXCameraAdapter::mPixelformats [] = {
- { OMX_COLOR_FormatCbYCrY, CameraParameters::PIXEL_FORMAT_YUV422I },
- { OMX_COLOR_FormatYUV420SemiPlanar, CameraParameters::PIXEL_FORMAT_YUV420SP },
- { OMX_COLOR_Format16bitRGB565, CameraParameters::PIXEL_FORMAT_RGB565 },
- { OMX_COLOR_FormatYUV420SemiPlanar, CameraParameters::PIXEL_FORMAT_YUV420P },
+ { OMX_COLOR_FormatCbYCrY, android::CameraParameters::PIXEL_FORMAT_YUV422I },
+ { OMX_COLOR_FormatYUV420SemiPlanar, android::CameraParameters::PIXEL_FORMAT_YUV420SP },
+ { OMX_COLOR_Format16bitRGB565, android::CameraParameters::PIXEL_FORMAT_RGB565 },
+ { OMX_COLOR_FormatYUV420SemiPlanar, android::CameraParameters::PIXEL_FORMAT_YUV420P },
{ OMX_COLOR_FormatUnused, TICameraParameters::PIXEL_FORMAT_UNUSED },
- { OMX_COLOR_FormatRawBayer10bit, CameraParameters::PIXEL_FORMAT_BAYER_RGGB },
+ { OMX_COLOR_FormatRawBayer10bit, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB },
};
const userToOMX_LUT OMXCameraAdapter::mFrameLayout [] = {
@@ -209,7 +210,7 @@ const LUTtype OMXCameraAdapter::mLayoutLUT = {
};
const CapCodingFormat OMXCameraAdapter::mImageCodingFormat [] = {
- { OMX_IMAGE_CodingJPEG, CameraParameters::PIXEL_FORMAT_JPEG },
+ { OMX_IMAGE_CodingJPEG, android::CameraParameters::PIXEL_FORMAT_JPEG },
{ (OMX_IMAGE_CODINGTYPE)OMX_TI_IMAGE_CodingJPS, TICameraParameters::PIXEL_FORMAT_JPS },
{ (OMX_IMAGE_CODINGTYPE)OMX_TI_IMAGE_CodingMPO, TICameraParameters::PIXEL_FORMAT_MPO },
};
@@ -408,7 +409,7 @@ status_t OMXCameraAdapter::encodePixelformatCap(OMX_COLOR_FORMATTYPE format,
void OMXCameraAdapter::encodeFrameRates(const int minFrameRate, const int maxFrameRate,
const OMX_TI_CAPTYPE & caps, const CapFramerate * const fixedFrameRates,
- const int frameRateCount, Vector<FpsRange> & fpsRanges) {
+ const int frameRateCount, android::Vector<FpsRange> & fpsRanges) {
LOG_FUNCTION_NAME;
if ( minFrameRate == maxFrameRate ) {
@@ -903,11 +904,11 @@ status_t OMXCameraAdapter::insertZoomStages(CameraProperties::Properties* params
params->set(CameraProperties::SUPPORTED_ZOOM_STAGES, zoomStageCount - 1); //As per CTS requirement
if ( 0 == zoomStageCount ) {
- params->set(CameraProperties::ZOOM_SUPPORTED, CameraParameters::FALSE);
- params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, CameraParameters::FALSE);
+ params->set(CameraProperties::ZOOM_SUPPORTED, android::CameraParameters::FALSE);
+ params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, android::CameraParameters::FALSE);
} else {
- params->set(CameraProperties::ZOOM_SUPPORTED, CameraParameters::TRUE);
- params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, CameraParameters::TRUE);
+ params->set(CameraProperties::ZOOM_SUPPORTED, android::CameraParameters::TRUE);
+ params->set(CameraProperties::SMOOTH_ZOOM_SUPPORTED, android::CameraParameters::TRUE);
}
LOG_FUNCTION_NAME_EXIT;
@@ -985,7 +986,7 @@ status_t OMXCameraAdapter::insertPreviewFormats(CameraProperties::Properties* pa
if (supported[0] != '\0') {
strncat(supported, PARAM_SEP, 1);
}
- strncat(supported, CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
+ strncat(supported, android::CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported);
}
@@ -998,7 +999,7 @@ status_t OMXCameraAdapter::insertFramerates(CameraProperties::Properties* params
{
// collect supported normal frame rates
{
- Vector<FpsRange> fpsRanges;
+ android::Vector<FpsRange> fpsRanges;
const int minFrameRate = max<int>(FPS_MIN * CameraHal::VFR_SCALE,
androidFromDucatiFrameRate(caps.xFramerateMin));
@@ -1062,7 +1063,7 @@ status_t OMXCameraAdapter::insertFramerates(CameraProperties::Properties* params
// collect supported extended frame rates
{
- Vector<FpsRange> fpsRanges;
+ android::Vector<FpsRange> fpsRanges;
const int minFrameRate = max<int>(FPS_MIN * CameraHal::VFR_SCALE,
androidFromDucatiFrameRate(caps.xFramerateMin));
@@ -1396,7 +1397,7 @@ status_t OMXCameraAdapter::insertFocusModes(CameraProperties::Properties* params
if (supported[0] != '\0') {
strncat(supported, PARAM_SEP, 1);
}
- strncat(supported, CameraParameters::FOCUS_MODE_INFINITY, MAX_PROP_NAME_LENGTH);
+ strncat(supported, android::CameraParameters::FOCUS_MODE_INFINITY, MAX_PROP_NAME_LENGTH);
}
params->set(CameraProperties::SUPPORTED_FOCUS_MODES, supported);
@@ -1460,15 +1461,15 @@ status_t OMXCameraAdapter::insertAreas(CameraProperties::Properties* params, OMX
status_t OMXCameraAdapter::insertVNFSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
status_t ret = NO_ERROR;
- LOG_FUNCTION_NAME
+ LOG_FUNCTION_NAME;
if ( OMX_TRUE == caps.bVideoNoiseFilterSupported ) {
- params->set(CameraProperties::VNF_SUPPORTED, CameraParameters::TRUE);
+ params->set(CameraProperties::VNF_SUPPORTED, android::CameraParameters::TRUE);
} else {
- params->set(CameraProperties::VNF_SUPPORTED, CameraParameters::FALSE);
+ params->set(CameraProperties::VNF_SUPPORTED, android::CameraParameters::FALSE);
}
- LOG_FUNCTION_NAME_EXIT
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -1476,15 +1477,15 @@ status_t OMXCameraAdapter::insertVNFSupported(CameraProperties::Properties* para
status_t OMXCameraAdapter::insertVSTABSupported(CameraProperties::Properties* params, OMX_TI_CAPTYPE &caps) {
status_t ret = NO_ERROR;
- LOG_FUNCTION_NAME
+ LOG_FUNCTION_NAME;
if ( OMX_TRUE == caps.bVideoStabilizationSupported ) {
- params->set(CameraProperties::VSTAB_SUPPORTED, CameraParameters::TRUE);
+ params->set(CameraProperties::VSTAB_SUPPORTED, android::CameraParameters::TRUE);
} else {
- params->set(CameraProperties::VSTAB_SUPPORTED, CameraParameters::FALSE);
+ params->set(CameraProperties::VSTAB_SUPPORTED, android::CameraParameters::FALSE);
}
- LOG_FUNCTION_NAME_EXIT
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -1496,15 +1497,15 @@ status_t OMXCameraAdapter::insertLocks(CameraProperties::Properties* params, OMX
LOG_FUNCTION_NAME
if ( caps.bAELockSupported ) {
- params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, CameraParameters::TRUE);
+ params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, android::CameraParameters::TRUE);
} else {
- params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, CameraParameters::FALSE);
+ params->set(CameraProperties::AUTO_EXPOSURE_LOCK_SUPPORTED, android::CameraParameters::FALSE);
}
if ( caps.bAWBLockSupported ) {
- params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, CameraParameters::TRUE);
+ params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, android::CameraParameters::TRUE);
} else {
- params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, CameraParameters::FALSE);
+ params->set(CameraProperties::AUTO_WHITEBALANCE_LOCK_SUPPORTED, android::CameraParameters::FALSE);
}
LOG_FUNCTION_NAME_EXIT
@@ -1675,7 +1676,7 @@ status_t OMXCameraAdapter::insertMechanicalMisalignmentCorrection(CameraProperti
params->set(CameraProperties::MECHANICAL_MISALIGNMENT_CORRECTION_SUPPORTED,
caps.bMechanicalMisalignmentSupported == OMX_TRUE ?
- CameraParameters::TRUE : CameraParameters::FALSE);
+ android::CameraParameters::TRUE : android::CameraParameters::FALSE);
return OK;
}
@@ -1777,11 +1778,11 @@ status_t OMXCameraAdapter::insertVideoSnapshotSupported(CameraProperties::Proper
if (caps.bStillCapDuringVideoSupported)
{
- params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, CameraParameters::TRUE);
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, android::CameraParameters::TRUE);
}
else
{
- params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, CameraParameters::FALSE);
+ params->set(CameraProperties::VIDEO_SNAPSHOT_SUPPORTED, android::CameraParameters::FALSE);
}
LOG_FUNCTION_NAME_EXIT;
@@ -1798,10 +1799,10 @@ status_t OMXCameraAdapter::insertGBCESupported(CameraProperties::Properties* par
if (caps.bGbceSupported) {
params->set(CameraProperties::SUPPORTED_GBCE,
- CameraParameters::TRUE);
+ android::CameraParameters::TRUE);
} else {
params->set(CameraProperties::SUPPORTED_GBCE,
- CameraParameters::FALSE);
+ android::CameraParameters::FALSE);
}
LOG_FUNCTION_NAME_EXIT;
@@ -1818,10 +1819,10 @@ status_t OMXCameraAdapter::insertGLBCESupported(CameraProperties::Properties* pa
if (caps.bGlbceSupported) {
params->set(CameraProperties::SUPPORTED_GLBCE,
- CameraParameters::TRUE);
+ android::CameraParameters::TRUE);
} else {
params->set(CameraProperties::SUPPORTED_GLBCE,
- CameraParameters::FALSE);
+ android::CameraParameters::FALSE);
}
LOG_FUNCTION_NAME_EXIT;
@@ -1897,8 +1898,8 @@ status_t OMXCameraAdapter::insertDefaults(CameraProperties::Properties* params,
params->set(CameraProperties::FOCUS_MODE, DEFAULT_FOCUS_MODE);
}
params->set(CameraProperties::IPP, DEFAULT_IPP);
- params->set(CameraProperties::GBCE, CameraParameters::FALSE);
- params->set(CameraProperties::GLBCE, CameraParameters::FALSE);
+ params->set(CameraProperties::GBCE, android::CameraParameters::FALSE);
+ params->set(CameraProperties::GLBCE, android::CameraParameters::FALSE);
params->set(CameraProperties::ISO_MODE, DEFAULT_ISO_MODE);
params->set(CameraProperties::JPEG_QUALITY, DEFAULT_JPEG_QUALITY);
params->set(CameraProperties::JPEG_THUMBNAIL_QUALITY, DEFAULT_THUMBNAIL_QUALITY);
@@ -2453,4 +2454,5 @@ status_t OMXCameraAdapter::getCaps(const int sensorId, CameraProperties::Propert
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
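
The bulk of this file's changes are mechanical re-qualification: once the adapter sources leave namespace android for Ti::Camera, the framework's string constants have to be spelled out in full. A minimal illustration of the same pattern used in the zoom/VNF/VSTAB hunks above; the helper function is hypothetical, only the android::CameraParameters constants and the Properties::set() call are taken from the patch.

#include <camera/CameraParameters.h>
#include "CameraProperties.h"

namespace Ti {
namespace Camera {

// Hypothetical helper: shows why the android:: prefix is now required on TRUE/FALSE.
static void setBoolCap(CameraProperties::Properties *params, const char *key, bool supported)
{
    params->set(key, supported ? android::CameraParameters::TRUE
                               : android::CameraParameters::FALSE);
}

} // namespace Camera
} // namespace Ti
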
diff --git a/camera/OMXCameraAdapter/OMXCapture.cpp b/camera/OMXCameraAdapter/OMXCapture.cpp
index 85aefa1..ad5043a 100644
--- a/camera/OMXCameraAdapter/OMXCapture.cpp
+++ b/camera/OMXCameraAdapter/OMXCapture.cpp
@@ -26,9 +26,10 @@
#include "ErrorUtils.h"
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersCapture(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -69,37 +70,37 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
CAMHAL_LOGVB("Image: cap.mHeight = %d", (int)cap->mHeight);
if ((valstr = params.getPictureFormat()) != NULL) {
- if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
+ if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV422I) == 0) {
CAMHAL_LOGDA("CbYCrY format selected");
pixFormat = OMX_COLOR_FormatCbYCrY;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_YUV422I;
- } else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_YUV422I;
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
CAMHAL_LOGDA("YUV420SP format selected");
pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_YUV420SP;
- } else if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_YUV420SP;
+ } else if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_RGB565) == 0) {
CAMHAL_LOGDA("RGB565 format selected");
pixFormat = OMX_COLOR_Format16bitRGB565;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_RGB565;
- } else if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_JPEG) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_RGB565;
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_JPEG) == 0) {
CAMHAL_LOGDA("JPEG format selected");
pixFormat = OMX_COLOR_FormatUnused;
codingMode = CodingJPEG;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_JPEG;
- } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_JPS) == 0) {
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_JPEG;
+ } else if (strcmp(valstr, TICameraParameters::PIXEL_FORMAT_JPS) == 0) {
CAMHAL_LOGDA("JPS format selected");
pixFormat = OMX_COLOR_FormatUnused;
codingMode = CodingJPS;
mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_JPS;
- } else if (strcmp(valstr, (const char *) TICameraParameters::PIXEL_FORMAT_MPO) == 0) {
+ } else if (strcmp(valstr, TICameraParameters::PIXEL_FORMAT_MPO) == 0) {
CAMHAL_LOGDA("MPO format selected");
pixFormat = OMX_COLOR_FormatUnused;
codingMode = CodingMPO;
mPictureFormatFromClient = TICameraParameters::PIXEL_FORMAT_MPO;
- } else if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
CAMHAL_LOGDA("RAW Picture format selected");
pixFormat = OMX_COLOR_FormatRawBayer10bit;
- mPictureFormatFromClient = CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
+ mPictureFormatFromClient = android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB;
} else {
CAMHAL_LOGEA("Invalid format, JPEG format selected as default");
pixFormat = OMX_COLOR_FormatUnused;
@@ -146,7 +147,7 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
str = params.get(TICameraParameters::KEY_TEMP_BRACKETING);
if ( ( str != NULL ) &&
- ( strcmp(str, CameraParameters::TRUE) == 0 ) ) {
+ ( strcmp(str, android::CameraParameters::TRUE) == 0 ) ) {
if ( !mBracketingSet ) {
mPendingCaptureSettings |= SetExpBracket;
@@ -207,9 +208,9 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
// If TRUE: Flush queue and abort processing before enqueing
valstr = params.get(TICameraParameters::KEY_FLUSH_SHOT_CONFIG_QUEUE);
if ( NULL != valstr ) {
- if ( 0 == strcmp(valstr, CameraParameters::TRUE) ) {
+ if ( 0 == strcmp(valstr, android::CameraParameters::TRUE) ) {
mFlushShotConfigQueue = true;
- } else if ( 0 == strcmp(valstr, CameraParameters::FALSE) ) {
+ } else if ( 0 == strcmp(valstr, android::CameraParameters::FALSE) ) {
mFlushShotConfigQueue = false;
} else {
CAMHAL_LOGE("Missing flush shot config parameter. Will use current (%s)",
@@ -217,12 +218,12 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
}
}
- if ( params.getInt(CameraParameters::KEY_ROTATION) != -1 )
+ if ( params.getInt(android::CameraParameters::KEY_ROTATION) != -1 )
{
- if (params.getInt(CameraParameters::KEY_ROTATION) != (int) mPictureRotation) {
+ if (params.getInt(android::CameraParameters::KEY_ROTATION) != (int) mPictureRotation) {
mPendingCaptureSettings |= SetRotation;
}
- mPictureRotation = params.getInt(CameraParameters::KEY_ROTATION);
+ mPictureRotation = params.getInt(android::CameraParameters::KEY_ROTATION);
}
else
{
@@ -266,13 +267,13 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
CAMHAL_LOGVB("Burst Frames set %d", mBurstFrames);
- if ( ( params.getInt(CameraParameters::KEY_JPEG_QUALITY) >= MIN_JPEG_QUALITY ) &&
- ( params.getInt(CameraParameters::KEY_JPEG_QUALITY) <= MAX_JPEG_QUALITY ) )
+ if ( ( params.getInt(android::CameraParameters::KEY_JPEG_QUALITY) >= MIN_JPEG_QUALITY ) &&
+ ( params.getInt(android::CameraParameters::KEY_JPEG_QUALITY) <= MAX_JPEG_QUALITY ) )
{
- if (params.getInt(CameraParameters::KEY_JPEG_QUALITY) != (int) mPictureQuality) {
+ if (params.getInt(android::CameraParameters::KEY_JPEG_QUALITY) != (int) mPictureQuality) {
mPendingCaptureSettings |= SetQuality;
}
- mPictureQuality = params.getInt(CameraParameters::KEY_JPEG_QUALITY);
+ mPictureQuality = params.getInt(android::CameraParameters::KEY_JPEG_QUALITY);
}
else
{
@@ -282,12 +283,12 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
CAMHAL_LOGVB("Picture Quality set %d", mPictureQuality);
- if ( params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH) >= 0 )
+ if ( params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH) >= 0 )
{
- if (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH) != (int) mThumbWidth) {
+ if (params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH) != (int) mThumbWidth) {
mPendingCaptureSettings |= SetThumb;
}
- mThumbWidth = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ mThumbWidth = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
}
else
{
@@ -298,12 +299,12 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
CAMHAL_LOGVB("Picture Thumb width set %d", mThumbWidth);
- if ( params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT) >= 0 )
+ if ( params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT) >= 0 )
{
- if (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT) != (int) mThumbHeight) {
+ if (params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT) != (int) mThumbHeight) {
mPendingCaptureSettings |= SetThumb;
}
- mThumbHeight = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ mThumbHeight = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
}
else
{
@@ -314,13 +315,13 @@ status_t OMXCameraAdapter::setParametersCapture(const CameraParameters &params,
CAMHAL_LOGVB("Picture Thumb height set %d", mThumbHeight);
- if ( ( params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) >= MIN_JPEG_QUALITY ) &&
- ( params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) <= MAX_JPEG_QUALITY ) )
+ if ( ( params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) >= MIN_JPEG_QUALITY ) &&
+ ( params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) <= MAX_JPEG_QUALITY ) )
{
- if (params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) != (int) mThumbQuality) {
+ if (params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY) != (int) mThumbQuality) {
mPendingCaptureSettings |= SetThumb;
}
- mThumbQuality = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ mThumbQuality = params.getInt(android::CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
}
else
{
@@ -566,7 +567,7 @@ status_t OMXCameraAdapter::setVectorStop(bool toPreview)
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::initVectorShot()
@@ -622,7 +623,7 @@ status_t OMXCameraAdapter::initVectorShot()
exit:
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::setVectorShot(int *evValues,
@@ -726,7 +727,7 @@ status_t OMXCameraAdapter::setVectorShot(int *evValues,
exit:
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::setExposureBracketing(int *evValues,
@@ -980,7 +981,7 @@ status_t OMXCameraAdapter::startBracketing(int range)
}
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( mBracketingEnabled )
{
@@ -999,7 +1000,7 @@ status_t OMXCameraAdapter::startBracketing(int range)
if ( NO_ERROR == ret )
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
mBracketingRange = range;
mBracketingBuffersQueued = new bool[imgCaptureData->mNumBufs];
@@ -1029,7 +1030,7 @@ status_t OMXCameraAdapter::startBracketing(int range)
ret = startImageCapture(true, cap_params);
delete cap_params;
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( NO_ERROR == ret )
{
@@ -1055,7 +1056,7 @@ status_t OMXCameraAdapter::stopBracketing()
ret = stopImageCapture();
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( NULL != mBracketingBuffersQueued )
{
@@ -1082,7 +1083,7 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParam
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mImageCaptureLock);
+ android::AutoMutex lock(mImageCaptureLock);
if(!mCaptureConfigured)
{
@@ -1113,7 +1114,7 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParam
//During bracketing image capture is already active
{
- Mutex::Autolock lock(mBracketingLock);
+ android::AutoMutex lock(mBracketingLock);
if ( mBracketingEnabled )
{
//Stop bracketing, activate normal burst for the remaining images
@@ -1204,7 +1205,7 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParam
if ((ret == NO_ERROR) && (mBurstFramesQueued > 0)) {
int index = 0;
int queued = 0;
- Mutex::Autolock lock(mBurstLock);
+ android::AutoMutex lock(mBurstLock);
if (capParams->mFlushShotConfigQueue) {
// reset shot queue
@@ -1325,7 +1326,7 @@ status_t OMXCameraAdapter::startImageCapture(bool bracketing, CachedCaptureParam
}
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
@@ -1333,7 +1334,7 @@ EXIT:
mCaptureSignalled = false;
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::stopImageCapture()
@@ -1345,7 +1346,7 @@ status_t OMXCameraAdapter::stopImageCapture()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mImageCaptureLock);
+ android::AutoMutex lock(mImageCaptureLock);
if (!mCaptureConfigured) {
//Capture is not ongoing, return from here
@@ -1428,7 +1429,7 @@ status_t OMXCameraAdapter::stopImageCapture()
mCaptureSignalled = true; //set this to true if we exited because of timeout
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
mFrameCount = 0;
mFirstFrameCondition.broadcast();
}
@@ -1451,7 +1452,7 @@ status_t OMXCameraAdapter::stopImageCapture()
flushBuffers(OMX_CAMERA_PORT_IMAGE_OUT_IMAGE);
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
@@ -1461,14 +1462,14 @@ EXIT:
}
{
- Mutex::Autolock lock(mFrameCountMutex);
+ android::AutoMutex lock(mFrameCountMutex);
mFrameCount = 0;
mFirstFrameCondition.broadcast();
}
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::disableImagePort(){
@@ -1571,7 +1572,7 @@ status_t OMXCameraAdapter::disableImagePort(){
#endif
EXIT:
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::initInternalBuffers(OMX_U32 portIndex)
@@ -1663,7 +1664,7 @@ status_t OMXCameraAdapter::deinitInternalBuffers(OMX_U32 portIndex)
return -EINVAL;
}
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::UseBuffersCapture(CameraBuffer * bufArr, int num)
@@ -1868,7 +1869,7 @@ status_t OMXCameraAdapter::UseBuffersCapture(CameraBuffer * bufArr, int num)
&singlePrevMode);
if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while configuring single preview mode 0x%x", eError);
- ret = ErrorUtils::omxToAndroidError(eError);
+ ret = Utils::ErrorUtils::omxToAndroidError(eError);
} else {
CAMHAL_LOGDA("single preview mode configured successfully");
}
@@ -1886,7 +1887,7 @@ status_t OMXCameraAdapter::UseBuffersCapture(CameraBuffer * bufArr, int num)
}
#endif
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
@@ -1901,7 +1902,7 @@ EXIT:
}
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::UseBuffersRawCapture(CameraBuffer *bufArr, int num)
@@ -1910,7 +1911,7 @@ status_t OMXCameraAdapter::UseBuffersRawCapture(CameraBuffer *bufArr, int num)
status_t ret;
OMX_ERRORTYPE eError;
OMXCameraPortParameters * imgRawCaptureData = NULL;
- Semaphore camSem;
+ Utils::Semaphore camSem;
OMXCameraPortParameters cap;
imgRawCaptureData = &mCameraAdapterParameters.mCameraPortParams[mCameraAdapterParameters.mVideoPortIndex];
@@ -2028,4 +2029,5 @@ status_t OMXCameraAdapter::UseBuffersRawCapture(CameraBuffer *bufArr, int num)
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
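
Every exit path in this file now funnels the OMX error through Utils::ErrorUtils before OR-ing it into the status_t it returns, so a failure from either side reaches the caller. A hedged sketch of that idiom in isolation; setConfigSketch is a stand-in, not a real member, and the real code applies the pattern inline in each method.

#include <utils/Errors.h>
#include <OMX_Core.h>
#include "ErrorUtils.h"

namespace Ti {
namespace Camera {

// Stand-in helper, illustrating the return-value idiom only.
static android::status_t setConfigSketch(OMX_HANDLETYPE handle, OMX_INDEXTYPE index, OMX_PTR cfg)
{
    android::status_t ret = android::NO_ERROR;
    OMX_ERRORTYPE eError = OMX_SetConfig(handle, index, cfg);
    // NO_ERROR is 0, so the bitwise OR preserves whichever side reported a failure.
    return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}

} // namespace Camera
} // namespace Ti
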
diff --git a/camera/OMXCameraAdapter/OMXDccDataSave.cpp b/camera/OMXCameraAdapter/OMXDccDataSave.cpp
index c7754a7..7547743 100644
--- a/camera/OMXCameraAdapter/OMXDccDataSave.cpp
+++ b/camera/OMXCameraAdapter/OMXDccDataSave.cpp
@@ -25,7 +25,8 @@
#include "OMXCameraAdapter.h"
-namespace android {
+namespace Ti {
+namespace Camera {
status_t OMXCameraAdapter::initDccFileDataSave(OMX_HANDLETYPE* omxHandle, int portIndex)
{
@@ -69,7 +70,7 @@ status_t OMXCameraAdapter::sniffDccFileDataSave(OMX_BUFFERHEADERTYPE* pBuffHeade
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mDccDataLock);
+ android::AutoMutex lock(mDccDataLock);
if ( NULL == pBuffHeader ) {
CAMHAL_LOGEA("Invalid Buffer header");
@@ -305,7 +306,7 @@ status_t OMXCameraAdapter::saveDccFileDataSave()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mDccDataLock);
+ android::AutoMutex lock(mDccDataLock);
if (mDccData.pData)
{
@@ -345,7 +346,7 @@ status_t OMXCameraAdapter::closeDccFileDataSave()
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mDccDataLock);
+ android::AutoMutex lock(mDccDataLock);
if (mDccData.pData) {
free(mDccData.pData);
@@ -356,4 +357,5 @@ status_t OMXCameraAdapter::closeDccFileDataSave()
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXDefaults.cpp b/camera/OMXCameraAdapter/OMXDefaults.cpp
index e0c17d1..2928573 100644
--- a/camera/OMXCameraAdapter/OMXDefaults.cpp
+++ b/camera/OMXCameraAdapter/OMXDefaults.cpp
@@ -24,7 +24,8 @@
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
-namespace android {
+namespace Ti {
+namespace Camera {
#define __STRINGIFY(s) __STRING(s)
@@ -60,22 +61,23 @@ const char OMXCameraAdapter::DEFAULT_NUM_PIC_BUFS[] = "1";
const char OMXCameraAdapter::DEFAULT_SATURATION[] = "100";
const char OMXCameraAdapter::DEFAULT_SCENE_MODE[] = "auto";
const char OMXCameraAdapter::DEFAULT_SHARPNESS[] = "100";
-const char * OMXCameraAdapter::DEFAULT_VSTAB = CameraParameters::FALSE;
-const char * OMXCameraAdapter::DEFAULT_VNF = CameraParameters::FALSE;
+const char * OMXCameraAdapter::DEFAULT_VSTAB = android::CameraParameters::FALSE;
+const char * OMXCameraAdapter::DEFAULT_VNF = android::CameraParameters::FALSE;
const char OMXCameraAdapter::DEFAULT_WB[] = "auto";
const char OMXCameraAdapter::DEFAULT_ZOOM[] = "0";
const char OMXCameraAdapter::DEFAULT_MAX_FD_HW_FACES[] = __STRINGIFY(MAX_NUM_FACES_SUPPORTED);
const char OMXCameraAdapter::DEFAULT_MAX_FD_SW_FACES[] = "0";
const char OMXCameraAdapter::DEFAULT_HOR_ANGLE[] = "54.8";
const char OMXCameraAdapter::DEFAULT_VER_ANGLE[] = "42.5";
-const char * OMXCameraAdapter::DEFAULT_AE_LOCK = CameraParameters::FALSE;
-const char * OMXCameraAdapter::DEFAULT_AWB_LOCK = CameraParameters::FALSE;
+const char * OMXCameraAdapter::DEFAULT_AE_LOCK = android::CameraParameters::FALSE;
+const char * OMXCameraAdapter::DEFAULT_AWB_LOCK = android::CameraParameters::FALSE;
const char OMXCameraAdapter::DEFAULT_VIDEO_SIZE[] = "1920x1080";
const char OMXCameraAdapter::DEFAULT_SENSOR_ORIENTATION[] = "0";
const char OMXCameraAdapter::DEFAULT_AUTOCONVERGENCE_MODE[] = "frame";
const char OMXCameraAdapter::DEFAULT_MANUAL_CONVERGENCE[] = "0";
-const char * OMXCameraAdapter::DEFAULT_MECHANICAL_MISALIGNMENT_CORRECTION_MODE = CameraParameters::TRUE;
+const char * OMXCameraAdapter::DEFAULT_MECHANICAL_MISALIGNMENT_CORRECTION_MODE = android::CameraParameters::TRUE;
const char OMXCameraAdapter::DEFAULT_EXIF_MAKE[] = "default_make";
const char OMXCameraAdapter::DEFAULT_EXIF_MODEL[] = "default_model";
-};
+} // namespace Camera
+} // namespace Ti
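
Worth noting in this hunk: defaults that are plain literals stay as char arrays, while defaults that alias a framework constant (VSTAB, VNF, the AE/AWB lock defaults) are const char * pointers aimed at android::CameraParameters::TRUE/FALSE, so the HAL and the framework keep a single copy of the "true"/"false" strings. A small illustration with a hypothetical holder class:

#include <camera/CameraParameters.h>

struct DefaultsSketch {                       // hypothetical, for illustration only
    static const char  SATURATION[];          // owns its own literal
    static const char *VSTAB;                 // aliases the framework constant
};
const char  DefaultsSketch::SATURATION[] = "100";
const char *DefaultsSketch::VSTAB = android::CameraParameters::FALSE;
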
diff --git a/camera/OMXCameraAdapter/OMXExif.cpp b/camera/OMXCameraAdapter/OMXExif.cpp
index 8cd86ea..cbf7c1c 100644
--- a/camera/OMXCameraAdapter/OMXExif.cpp
+++ b/camera/OMXCameraAdapter/OMXExif.cpp
@@ -25,9 +25,10 @@
#include "OMXCameraAdapter.h"
#include <math.h>
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersEXIF(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -36,7 +37,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
LOG_FUNCTION_NAME;
- if( ( valstr = params.get(CameraParameters::KEY_GPS_LATITUDE) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_LATITUDE) ) != NULL )
{
gpsPos = strtod(valstr, NULL);
@@ -68,7 +69,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mLatValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_LONGITUDE) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_LONGITUDE) ) != NULL )
{
gpsPos = strtod(valstr, NULL);
@@ -100,7 +101,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mLongValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_ALTITUDE) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_ALTITUDE) ) != NULL )
{
gpsPos = strtod(valstr, NULL);
mEXIFData.mGPSData.mAltitude = floor(fabs(gpsPos));
@@ -116,7 +117,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mAltitudeValid= false;
}
- if( (valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
+ if( (valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP)) != NULL )
{
long gpsTimestamp = strtol(valstr, NULL, 10);
struct tm *timeinfo = gmtime( ( time_t * ) & (gpsTimestamp) );
@@ -137,7 +138,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mTimeStampValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_TIMESTAMP) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_TIMESTAMP) ) != NULL )
{
long gpsDatestamp = strtol(valstr, NULL, 10);
struct tm *timeinfo = gmtime( ( time_t * ) & (gpsDatestamp) );
@@ -156,7 +157,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
mEXIFData.mGPSData.mDatestampValid = false;
}
- if( ( valstr = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD) ) != NULL )
+ if( ( valstr = params.get(android::CameraParameters::KEY_GPS_PROCESSING_METHOD) ) != NULL )
{
strncpy(mEXIFData.mGPSData.mProcMethod, valstr, GPS_PROCESSING_SIZE-1);
mEXIFData.mGPSData.mProcMethodValid = true;
@@ -209,7 +210,7 @@ status_t OMXCameraAdapter::setParametersEXIF(const CameraParameters &params,
}
- if( ( valstr = params.get(CameraParameters::KEY_FOCAL_LENGTH) ) != NULL ) {
+ if( ( valstr = params.get(android::CameraParameters::KEY_FOCAL_LENGTH) ) != NULL ) {
CAMHAL_LOGVB("EXIF Focal length: %s", valstr);
ExifElementsTable::stringToRational(valstr,
&mEXIFData.mFocalNum,
@@ -840,4 +841,5 @@ status_t OMXCameraAdapter::convertGPSCoord(double coord,
return NO_ERROR;
}
-};
+} // namespace Camera
+} // namespace Ti
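
The EXIF hunks only re-qualify the GPS keys, but the surrounding pattern is the same throughout the file: read the string value of an android::CameraParameters GPS key, convert it with strtod/strtol, and record a validity flag. A reduced sketch of the altitude case; the struct is a stand-in for the real EXIF data block, and only the get/strtod/floor(fabs()) flow is taken from the visible lines.

#include <stdlib.h>
#include <math.h>
#include <camera/CameraParameters.h>

struct GpsAltitudeSketch {                    // stand-in for the real EXIF GPS block
    int  altitude;
    bool altitudeValid;
};

static void parseAltitude(const android::CameraParameters &params, GpsAltitudeSketch &out)
{
    const char *valstr = params.get(android::CameraParameters::KEY_GPS_ALTITUDE);
    if (valstr != NULL) {
        double gpsPos = strtod(valstr, NULL);
        out.altitude      = (int) floor(fabs(gpsPos));
        out.altitudeValid = true;
    } else {
        out.altitudeValid = false;
    }
}
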
diff --git a/camera/OMXCameraAdapter/OMXFD.cpp b/camera/OMXCameraAdapter/OMXFD.cpp
index 2381cfa..c9cfe75 100644
--- a/camera/OMXCameraAdapter/OMXFD.cpp
+++ b/camera/OMXCameraAdapter/OMXFD.cpp
@@ -33,9 +33,10 @@ static const int HorizontalFaceSizeThreshold = 30;
static const int VerticalFaceSizeThreshold = 30;
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersFD(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersFD(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
@@ -51,7 +52,7 @@ status_t OMXCameraAdapter::startFaceDetection()
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
ret = setFaceDetection(true, mDeviceOrientation);
if (ret != NO_ERROR) {
@@ -78,7 +79,7 @@ status_t OMXCameraAdapter::stopFaceDetection()
BaseCameraAdapter::AdapterState state;
BaseCameraAdapter::getState(state);
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
ret = setFaceDetection(false, mDeviceOrientation);
if (ret != NO_ERROR) {
@@ -106,7 +107,7 @@ status_t OMXCameraAdapter::stopFaceDetection()
void OMXCameraAdapter::pauseFaceDetection(bool pause)
{
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
// pausing will only take affect if fd is already running
if (mFaceDetectionRunning) {
mFaceDetectionPaused = pause;
@@ -118,7 +119,7 @@ status_t OMXCameraAdapter::setFaceDetectionOrientation(OMX_U32 orientation)
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
if (mFaceDetectionRunning) {
// restart face detection with new rotation
@@ -200,7 +201,7 @@ status_t OMXCameraAdapter::setFaceDetection(bool enable, OMX_U32 orientation)
}
status_t OMXCameraAdapter::createPreviewMetadata(OMX_BUFFERHEADERTYPE* pBuffHeader,
- sp<CameraMetadataResult> &result,
+ android::sp<CameraMetadataResult> &result,
size_t previewWidth,
size_t previewHeight)
{
@@ -299,7 +300,7 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
return -ENOMEM;
}
- Mutex::Autolock lock(mFaceDetectionLock);
+ android::AutoMutex lock(mFaceDetectionLock);
if ( (NULL != faceData) && (0 < faceData->ulFaceCount) ) {
int orient_mult;
@@ -471,4 +472,5 @@ status_t OMXCameraAdapter::encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *fa
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
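
The recurring substitution in this file (and across the patch) is Mutex::Autolock becoming android::AutoMutex. android::AutoMutex is the framework's typedef for android::Mutex::Autolock, so the locking behaviour is unchanged; only the spelling now resolves from inside Ti::Camera. A minimal sketch of the scoped-lock pattern used around the face-detection state; the class itself is illustrative.

#include <utils/Mutex.h>

class PauseSketch {                           // illustrative only
public:
    PauseSketch() : mRunning(false), mPaused(false) {}

    void pause(bool paused) {
        android::AutoMutex lock(mLock);       // released automatically at scope exit
        if (mRunning) {
            mPaused = paused;
        }
    }

private:
    android::Mutex mLock;
    bool mRunning;
    bool mPaused;
};
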
diff --git a/camera/OMXCameraAdapter/OMXFocus.cpp b/camera/OMXCameraAdapter/OMXFocus.cpp
index d37c5b2..623a0c8 100644
--- a/camera/OMXCameraAdapter/OMXFocus.cpp
+++ b/camera/OMXCameraAdapter/OMXFocus.cpp
@@ -30,25 +30,26 @@
#define AF_IMAGE_CALLBACK_TIMEOUT 5000000 //5 seconds timeout
#define AF_VIDEO_CALLBACK_TIMEOUT 2800000 //2.8 seconds timeout
-namespace android {
+namespace Ti {
+namespace Camera {
const nsecs_t OMXCameraAdapter::CANCEL_AF_TIMEOUT = seconds_to_nanoseconds(1);
-status_t OMXCameraAdapter::setParametersFocus(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersFocus(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
const char *str = NULL;
- Vector< sp<CameraArea> > tempAreas;
+ android::Vector<android::sp<CameraArea> > tempAreas;
size_t MAX_FOCUS_AREAS;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mFocusAreasLock);
+ android::AutoMutex lock(mFocusAreasLock);
- str = params.get(CameraParameters::KEY_FOCUS_AREAS);
+ str = params.get(android::CameraParameters::KEY_FOCUS_AREAS);
- MAX_FOCUS_AREAS = atoi(params.get(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS));
+ MAX_FOCUS_AREAS = atoi(params.get(android::CameraParameters::KEY_MAX_NUM_FOCUS_AREAS));
if ( NULL != str ) {
ret = CameraArea::parseAreas(str, ( strlen(str) + 1 ), tempAreas);
@@ -167,11 +168,11 @@ status_t OMXCameraAdapter::doAutoFocus()
(OMX_INDEXTYPE)OMX_TI_IndexConfigAutofocusEnable,
&bOMX);
if ( OMX_ErrorNone != eError ) {
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
{
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
// force AF, Ducati will take care of whether CAF
// or AF will be performed, depending on light conditions
@@ -264,7 +265,7 @@ status_t OMXCameraAdapter::stopAutoFocus()
&focusControl);
if ( OMX_ErrorNone != eError ) {
CAMHAL_LOGEB("Error while stopping focus 0x%x", eError);
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
LOG_FUNCTION_NAME_EXIT;
@@ -296,7 +297,7 @@ status_t OMXCameraAdapter::getFocusMode(OMX_IMAGE_CONFIG_FOCUSCONTROLTYPE &focus
LOG_FUNCTION_NAME_EXIT;
- return ErrorUtils::omxToAndroidError(eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
}
status_t OMXCameraAdapter::cancelAutoFocus()
@@ -317,7 +318,7 @@ status_t OMXCameraAdapter::cancelAutoFocus()
( focusMode.eFocusControl != ( OMX_IMAGE_FOCUSCONTROLTYPE )
OMX_IMAGE_FocusControlAutoInfinity ) &&
( focusMode.eFocusControl != OMX_IMAGE_FocusControlOff ) ) {
- Mutex::Autolock lock(mCancelAFMutex);
+ android::AutoMutex lock(mCancelAFMutex);
stopAutoFocus();
ret = mCancelAFCond.waitRelative(mCancelAFMutex, CANCEL_AF_TIMEOUT);
if ( NO_ERROR != ret ) {
@@ -332,7 +333,7 @@ status_t OMXCameraAdapter::cancelAutoFocus()
{
// Signal to 'doAutoFocus()'
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
@@ -546,7 +547,7 @@ status_t OMXCameraAdapter::checkFocus(OMX_PARAM_FOCUSSTATUSTYPE *eFocusStatus)
return ret;
}
-status_t OMXCameraAdapter::updateFocusDistances(CameraParameters &params)
+status_t OMXCameraAdapter::updateFocusDistances(android::CameraParameters &params)
{
OMX_U32 focusNear, focusOptimal, focusFar;
status_t ret = NO_ERROR;
@@ -632,7 +633,7 @@ status_t OMXCameraAdapter::encodeFocusDistance(OMX_U32 dist, char *buffer, size_
{
if ( 0 == dist )
{
- strncpy(buffer, CameraParameters::FOCUS_DISTANCE_INFINITY, ( length - 1 ));
+ strncpy(buffer, android::CameraParameters::FOCUS_DISTANCE_INFINITY, ( length - 1 ));
}
else
{
@@ -650,7 +651,7 @@ status_t OMXCameraAdapter::encodeFocusDistance(OMX_U32 dist, char *buffer, size_
status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
OMX_U32 &optimal,
OMX_U32 &far,
- CameraParameters& params)
+ android::CameraParameters& params)
{
status_t ret = NO_ERROR;
@@ -689,7 +690,7 @@ status_t OMXCameraAdapter::addFocusDistances(OMX_U32 &near,
mFocusDistOptimal,
mFocusDistFar);
- params.set(CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
+ params.set(android::CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistBuffer);
}
LOG_FUNCTION_NAME_EXIT;
@@ -836,14 +837,14 @@ void OMXCameraAdapter::handleFocusCallback() {
CAMHAL_LOGEA("Focus status check failed!");
// signal and unblock doAutoFocus
if (AF_ACTIVE & nextState) {
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
return;
}
if ( eFocusStatus.eFocusStatus == OMX_FocusStatusOff ) {
- Mutex::Autolock lock(mCancelAFMutex);
+ android::AutoMutex lock(mCancelAFMutex);
mCancelAFCond.signal();
return;
}
@@ -851,7 +852,7 @@ void OMXCameraAdapter::handleFocusCallback() {
if (eFocusStatus.eFocusStatus != OMX_FocusStatusRequest) {
// signal doAutoFocus when a end of scan message comes
// ignore start of scan
- Mutex::Autolock lock(mDoAFMutex);
+ android::AutoMutex lock(mDoAFMutex);
mDoAFCond.broadcast();
}
@@ -876,4 +877,5 @@ void OMXCameraAdapter::handleFocusCallback() {
notifyFocusSubscribers(focusStatus);
}
-};
+} // namespace Camera
+} // namespace Ti
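
cancelAutoFocus() above waits on mCancelAFCond with a relative timeout while handleFocusCallback() signals it once the focus status reports off; doAutoFocus() uses the same shape with mDoAFCond and broadcast(). A hedged sketch of that handshake using the same framework primitives; the struct and member names are placeholders.

#include <utils/Errors.h>
#include <utils/Timers.h>
#include <utils/Mutex.h>
#include <utils/Condition.h>

struct CancelHandshake {                      // placeholder names
    android::Mutex     lock;
    android::Condition cond;

    // Caller side: block until the callback confirms the stop, or time out.
    android::status_t waitForStop(nsecs_t timeout) {
        android::AutoMutex l(lock);
        return cond.waitRelative(lock, timeout);   // returns TIMED_OUT on expiry
    }

    // Callback side: wake the waiter once the hardware reports focus off.
    void notifyStopped() {
        android::AutoMutex l(lock);
        cond.signal();
    }
};
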
diff --git a/camera/OMXCameraAdapter/OMXMetadata.cpp b/camera/OMXCameraAdapter/OMXMetadata.cpp
index c04b7af..2a31961 100644
--- a/camera/OMXCameraAdapter/OMXMetadata.cpp
+++ b/camera/OMXCameraAdapter/OMXMetadata.cpp
@@ -27,10 +27,11 @@
#include "OMXCameraAdapter.h"
-namespace android {
+namespace Ti {
+namespace Camera {
#ifdef OMAP_ENHANCEMENT_CPCAM
-status_t OMXCameraAdapter::setMetaData(CameraMetadata &meta_data, const OMX_PTR plat_pvt) const
+status_t OMXCameraAdapter::setMetaData(android::CameraMetadata &meta_data, const OMX_PTR plat_pvt) const
{
status_t ret = NO_ERROR;
OMX_OTHER_EXTRADATATYPE *extraData;
@@ -41,13 +42,13 @@ status_t OMXCameraAdapter::setMetaData(CameraMetadata &meta_data, const OMX_PTR
OMX_TI_WHITEBALANCERESULTTYPE * WBdata;
WBdata = (OMX_TI_WHITEBALANCERESULTTYPE*) extraData->data;
- meta_data.set(CameraMetadata::KEY_AWB_TEMP, (int)WBdata->nColorTemperature);
- meta_data.set4(CameraMetadata::KEY_AWB_GAINS,
+ meta_data.set(android::CameraMetadata::KEY_AWB_TEMP, (int)WBdata->nColorTemperature);
+ meta_data.set4(android::CameraMetadata::KEY_AWB_GAINS,
(int)WBdata->nGainR,
(int)WBdata->nGainGR,
(int)WBdata->nGainGB,
(int)WBdata->nGainB);
- meta_data.set4(CameraMetadata::KEY_AWB_OFFSETS,
+ meta_data.set4(android::CameraMetadata::KEY_AWB_OFFSETS,
(int)WBdata->nOffsetR,
(int)WBdata->nOffsetGR,
(int)WBdata->nOffsetGB,
@@ -62,22 +63,22 @@ status_t OMXCameraAdapter::setMetaData(CameraMetadata &meta_data, const OMX_PTR
OMX_TI_VECTSHOTINFOTYPE *shotInfo;
shotInfo = (OMX_TI_VECTSHOTINFOTYPE*) extraData->data;
- meta_data.set(CameraMetadata::KEY_FRAME_NUMBER, (int)shotInfo->nFrameNum);
- meta_data.set(CameraMetadata::KEY_SHOT_NUMBER, (int)shotInfo->nConfigId);
- meta_data.set(CameraMetadata::KEY_ANALOG_GAIN, (int)shotInfo->nAGain);
- meta_data.set(CameraMetadata::KEY_ANALOG_GAIN_REQ, (int)shotInfo->nReqGain);
- meta_data.set(CameraMetadata::KEY_ANALOG_GAIN_MIN, (int)shotInfo->nGainMin);
- meta_data.set(CameraMetadata::KEY_ANALOG_GAIN_MAX, (int)shotInfo->nGainMax);
- meta_data.set(CameraMetadata::KEY_ANALOG_GAIN_DEV, (int)shotInfo->nDevAGain);
- meta_data.set(CameraMetadata::KEY_ANALOG_GAIN_ERROR, (int)shotInfo->nSenAGainErr);
- meta_data.set(CameraMetadata::KEY_EXPOSURE_TIME, (int)shotInfo->nExpTime);
- meta_data.set(CameraMetadata::KEY_EXPOSURE_TIME_REQ, (int)shotInfo->nReqExpTime);
- meta_data.set(CameraMetadata::KEY_EXPOSURE_TIME_MIN, (int)shotInfo->nExpMin);
- meta_data.set(CameraMetadata::KEY_EXPOSURE_TIME_MAX, (int)shotInfo->nExpMax);
- meta_data.set(CameraMetadata::KEY_EXPOSURE_TIME_DEV, (int)shotInfo->nDevExpTime);
- meta_data.set(CameraMetadata::KEY_EXPOSURE_TIME_ERROR, (int)shotInfo->nSenExpTimeErr);
- meta_data.set(CameraMetadata::KEY_EXPOSURE_COMPENSATION_REQ, (int)shotInfo->nReqEC);
- meta_data.set(CameraMetadata::KEY_EXPOSURE_DEV, (int)shotInfo->nDevEV);
+ meta_data.set(android::CameraMetadata::KEY_FRAME_NUMBER, (int)shotInfo->nFrameNum);
+ meta_data.set(android::CameraMetadata::KEY_SHOT_NUMBER, (int)shotInfo->nConfigId);
+ meta_data.set(android::CameraMetadata::KEY_ANALOG_GAIN, (int)shotInfo->nAGain);
+ meta_data.set(android::CameraMetadata::KEY_ANALOG_GAIN_REQ, (int)shotInfo->nReqGain);
+ meta_data.set(android::CameraMetadata::KEY_ANALOG_GAIN_MIN, (int)shotInfo->nGainMin);
+ meta_data.set(android::CameraMetadata::KEY_ANALOG_GAIN_MAX, (int)shotInfo->nGainMax);
+ meta_data.set(android::CameraMetadata::KEY_ANALOG_GAIN_DEV, (int)shotInfo->nDevAGain);
+ meta_data.set(android::CameraMetadata::KEY_ANALOG_GAIN_ERROR, (int)shotInfo->nSenAGainErr);
+ meta_data.set(android::CameraMetadata::KEY_EXPOSURE_TIME, (int)shotInfo->nExpTime);
+ meta_data.set(android::CameraMetadata::KEY_EXPOSURE_TIME_REQ, (int)shotInfo->nReqExpTime);
+ meta_data.set(android::CameraMetadata::KEY_EXPOSURE_TIME_MIN, (int)shotInfo->nExpMin);
+ meta_data.set(android::CameraMetadata::KEY_EXPOSURE_TIME_MAX, (int)shotInfo->nExpMax);
+ meta_data.set(android::CameraMetadata::KEY_EXPOSURE_TIME_DEV, (int)shotInfo->nDevExpTime);
+ meta_data.set(android::CameraMetadata::KEY_EXPOSURE_TIME_ERROR, (int)shotInfo->nSenExpTimeErr);
+ meta_data.set(android::CameraMetadata::KEY_EXPOSURE_COMPENSATION_REQ, (int)shotInfo->nReqEC);
+ meta_data.set(android::CameraMetadata::KEY_EXPOSURE_DEV, (int)shotInfo->nDevEV);
}
// TODO(XXX): Use format abstraction for LSC values
@@ -87,7 +88,7 @@ status_t OMXCameraAdapter::setMetaData(CameraMetadata &meta_data, const OMX_PTR
if ( NULL != extraData ) {
OMX_TI_LSCTABLETYPE *lscTbl;
OMX_U8 *lsc;
- String8 val;
+ android::String8 val;
lscTbl = (OMX_TI_LSCTABLETYPE*) extraData->data;
lsc = lscTbl->pGainTable;
if ( (0U == lscTbl->nWidth) || (0U == lscTbl->nHeight) ) {
@@ -113,8 +114,8 @@ status_t OMXCameraAdapter::setMetaData(CameraMetadata &meta_data, const OMX_PTR
}
val.append(")");
}
- meta_data.set(CameraMetadata::KEY_LSC_TABLE, val);
- meta_data.setBool(CameraMetadata::KEY_LSC_TABLE_APPLIED,
+ meta_data.set(android::CameraMetadata::KEY_LSC_TABLE, val);
+ meta_data.setBool(android::CameraMetadata::KEY_LSC_TABLE_APPLIED,
(OMX_TRUE == lscTbl->bApplied) ? true : false);
}
}
@@ -147,4 +148,5 @@ void OMXCameraAdapter::encodePreviewMetadata(camera_frame_metadata_t *meta, cons
#endif
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXReprocess.cpp b/camera/OMXCameraAdapter/OMXReprocess.cpp
index b80fba1..28f1744 100644
--- a/camera/OMXCameraAdapter/OMXReprocess.cpp
+++ b/camera/OMXCameraAdapter/OMXReprocess.cpp
@@ -26,9 +26,10 @@
#include "ErrorUtils.h"
-namespace android {
+namespace Ti {
+namespace Camera {
-status_t OMXCameraAdapter::setParametersReprocess(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersReprocess(const android::CameraParameters &params,
CameraBuffer* buffers,
BaseCameraAdapter::AdapterState state)
{
@@ -53,10 +54,10 @@ status_t OMXCameraAdapter::setParametersReprocess(const CameraParameters &params
valstr = buffers[0].format;
if (valstr != NULL) {
- if(strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
+ if(strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_YUV420SP) == 0) {
CAMHAL_LOGDA("YUV420SP format selected");
pixFormat = OMX_COLOR_FormatYUV420SemiPlanar;
- } else if (strcmp(valstr, (const char *) CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
+ } else if (strcmp(valstr, android::CameraParameters::PIXEL_FORMAT_BAYER_RGGB) == 0) {
CAMHAL_LOGDA("RAW Picture format selected");
pixFormat = OMX_COLOR_FormatRawBayer10bit;
} else {
@@ -110,7 +111,7 @@ status_t OMXCameraAdapter::startReprocess()
CAMHAL_LOGD ("mReprocConfigured = %d", mBurstFramesQueued);
if (NO_ERROR == ret) {
- Mutex::Autolock lock(mBurstLock);
+ android::AutoMutex lock(mBurstLock);
for ( int index = 0 ; index < portData->mMaxQueueable ; index++ ) {
CAMHAL_LOGDB("Queuing buffer on video input port - %p",
@@ -122,13 +123,13 @@ status_t OMXCameraAdapter::startReprocess()
}
}
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::stopReprocess()
@@ -191,7 +192,7 @@ status_t OMXCameraAdapter::stopReprocess()
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::disableReprocess(){
@@ -201,7 +202,7 @@ status_t OMXCameraAdapter::disableReprocess(){
// no-op..for now
EXIT:
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
status_t OMXCameraAdapter::UseBuffersReprocess(CameraBuffer *bufArr, int num)
@@ -321,7 +322,7 @@ status_t OMXCameraAdapter::UseBuffersReprocess(CameraBuffer *bufArr, int num)
mReprocConfigured = true;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
EXIT:
CAMHAL_LOGEB("Exiting function %s because of ret %d eError=%x", __FUNCTION__, ret, eError);
@@ -331,8 +332,9 @@ EXIT:
}
performCleanupAfterError();
LOG_FUNCTION_NAME_EXIT;
- return (ret | ErrorUtils::omxToAndroidError(eError));
+ return (ret | Utils::ErrorUtils::omxToAndroidError(eError));
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OMXCameraAdapter/OMXZoom.cpp b/camera/OMXCameraAdapter/OMXZoom.cpp
index f2775b5..e39a3b0 100644
--- a/camera/OMXCameraAdapter/OMXZoom.cpp
+++ b/camera/OMXCameraAdapter/OMXZoom.cpp
@@ -24,7 +24,8 @@
#include "CameraHal.h"
#include "OMXCameraAdapter.h"
-namespace android {
+namespace Ti {
+namespace Camera {
const int32_t OMXCameraAdapter::ZOOM_STEPS [ZOOM_STAGES] = {
65536, 68157, 70124, 72745,
@@ -45,18 +46,18 @@ const int32_t OMXCameraAdapter::ZOOM_STEPS [ZOOM_STAGES] = {
524288 };
-status_t OMXCameraAdapter::setParametersZoom(const CameraParameters &params,
+status_t OMXCameraAdapter::setParametersZoom(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state)
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
LOG_FUNCTION_NAME;
//Immediate zoom should not be avaialable while smooth zoom is running
if ( ( ZOOM_ACTIVE & state ) != ZOOM_ACTIVE )
{
- int zoom = params.getInt(CameraParameters::KEY_ZOOM);
+ int zoom = params.getInt(android::CameraParameters::KEY_ZOOM);
if (( zoom >= 0 ) && ( zoom < mMaxZoomSupported )) {
mTargetZoomIdx = zoom;
@@ -133,7 +134,7 @@ status_t OMXCameraAdapter::advanceZoom()
{
status_t ret = NO_ERROR;
AdapterState state;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
BaseCameraAdapter::getState(state);
@@ -235,7 +236,7 @@ status_t OMXCameraAdapter::startSmoothZoom(int targetIdx)
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
CAMHAL_LOGDB("Start smooth zoom target = %d, mCurrentIdx = %d",
targetIdx,
@@ -258,7 +259,7 @@ status_t OMXCameraAdapter::startSmoothZoom(int targetIdx)
status_t OMXCameraAdapter::stopSmoothZoom()
{
status_t ret = NO_ERROR;
- Mutex::Autolock lock(mZoomLock);
+ android::AutoMutex lock(mZoomLock);
LOG_FUNCTION_NAME;
@@ -284,4 +285,5 @@ status_t OMXCameraAdapter::stopSmoothZoom()
return ret;
}
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/SensorListener.cpp b/camera/SensorListener.cpp
index a5ddbd5..45a278b 100644
--- a/camera/SensorListener.cpp
+++ b/camera/SensorListener.cpp
@@ -27,7 +27,8 @@
#include <math.h>
#include <sys/types.h>
-namespace android {
+namespace Ti {
+namespace Camera {
/*** static declarations ***/
static const float RADIANS_2_DEG = (float) (180 / M_PI);
@@ -43,7 +44,7 @@ static int sensor_events_listener(int fd, int events, void* data)
ASensorEvent sen_events[8];
while ((num_sensors = listener->mSensorEventQueue->read(sen_events, 8)) > 0) {
for (int i = 0; i < num_sensors; i++) {
- if (sen_events[i].type == Sensor::TYPE_ACCELEROMETER) {
+ if (sen_events[i].type == android::Sensor::TYPE_ACCELEROMETER) {
float x = sen_events[i].vector.azimuth;
float y = sen_events[i].vector.pitch;
float z = sen_events[i].vector.roll;
@@ -76,7 +77,7 @@ static int sensor_events_listener(int fd, int events, void* data)
}
listener->handleOrientation(orient, tilt);
CAMHAL_LOGVB(" tilt = %d orientation = %d", tilt, orient);
- } else if (sen_events[i].type == Sensor::TYPE_GYROSCOPE) {
+ } else if (sen_events[i].type == android::Sensor::TYPE_GYROSCOPE) {
CAMHAL_LOGVA("GYROSCOPE EVENT");
}
}
@@ -129,11 +130,11 @@ SensorListener::~SensorListener() {
status_t SensorListener::initialize() {
status_t ret = NO_ERROR;
- SensorManager& mgr(SensorManager::getInstance());
+ android::SensorManager& mgr(android::SensorManager::getInstance());
LOG_FUNCTION_NAME;
- sp<Looper> mLooper;
+ android::sp<android::Looper> mLooper;
mSensorEventQueue = mgr.createEventQueue();
if (mSensorEventQueue == NULL) {
@@ -142,7 +143,7 @@ status_t SensorListener::initialize() {
goto out;
}
- mLooper = new Looper(false);
+ mLooper = new android::Looper(false);
mLooper->addFd(mSensorEventQueue->getFd(), 0, ALOOPER_EVENT_INPUT, sensor_events_listener, this);
if (mSensorLooperThread.get() == NULL)
@@ -154,7 +155,7 @@ status_t SensorListener::initialize() {
goto out;
}
- ret = mSensorLooperThread->run("sensor looper thread", PRIORITY_URGENT_DISPLAY);
+ ret = mSensorLooperThread->run("sensor looper thread", android::PRIORITY_URGENT_DISPLAY);
if (ret == INVALID_OPERATION){
CAMHAL_LOGDA("thread already running ?!?");
} else if (ret != NO_ERROR) {
@@ -181,7 +182,7 @@ void SensorListener::setCallbacks(orientation_callback_t orientation_cb, void *c
void SensorListener::handleOrientation(uint32_t orientation, uint32_t tilt) {
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(&mLock);
+ android::AutoMutex lock(&mLock);
if (mOrientationCb && (sensorsEnabled & SENSOR_ORIENTATION)) {
mOrientationCb(orientation, tilt, mCbCookie);
@@ -191,15 +192,15 @@ void SensorListener::handleOrientation(uint32_t orientation, uint32_t tilt) {
}
void SensorListener::enableSensor(sensor_type_t type) {
- Sensor const* sensor;
- SensorManager& mgr(SensorManager::getInstance());
+ android::Sensor const* sensor;
+ android::SensorManager& mgr(android::SensorManager::getInstance());
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(&mLock);
+ android::AutoMutex lock(&mLock);
if ((type & SENSOR_ORIENTATION) && !(sensorsEnabled & SENSOR_ORIENTATION)) {
- sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
+ sensor = mgr.getDefaultSensor(android::Sensor::TYPE_ACCELEROMETER);
CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
mSensorEventQueue->enableSensor(sensor);
mSensorEventQueue->setEventRate(sensor, ms2ns(100));
@@ -210,15 +211,15 @@ void SensorListener::enableSensor(sensor_type_t type) {
}
void SensorListener::disableSensor(sensor_type_t type) {
- Sensor const* sensor;
- SensorManager& mgr(SensorManager::getInstance());
+ android::Sensor const* sensor;
+ android::SensorManager& mgr(android::SensorManager::getInstance());
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(&mLock);
+ android::AutoMutex lock(&mLock);
if ((type & SENSOR_ORIENTATION) && (sensorsEnabled & SENSOR_ORIENTATION)) {
- sensor = mgr.getDefaultSensor(Sensor::TYPE_ACCELEROMETER);
+ sensor = mgr.getDefaultSensor(android::Sensor::TYPE_ACCELEROMETER);
CAMHAL_LOGDB("orientation = %p (%s)", sensor, sensor->getName().string());
mSensorEventQueue->disableSensor(sensor);
sensorsEnabled &= ~SENSOR_ORIENTATION;
@@ -227,4 +228,5 @@ void SensorListener::disableSensor(sensor_type_t type) {
LOG_FUNCTION_NAME_EXIT;
}
-} // namespace android
+} // namespace Camera
+} // namespace Ti
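
For reference, the sensor plumbing this file re-qualifies is: get the android::SensorManager singleton, create an event queue, and hand the queue's fd to an android::Looper callback that drains ASensorEvent batches. A compressed sketch using the same calls the patch itself touches; the header paths assume the gui/ SensorManager of this Android generation and may differ by branch, and the function names are placeholders.

#include <android/sensor.h>
#include <gui/SensorManager.h>
#include <gui/SensorEventQueue.h>
#include <utils/Looper.h>

static int on_sensor_events(int /*fd*/, int /*events*/, void *data)
{
    android::SensorEventQueue *queue = static_cast<android::SensorEventQueue *>(data);
    ASensorEvent events[8];
    ssize_t n;
    while ((n = queue->read(events, 8)) > 0) {
        // inspect events[0..n-1] here (accelerometer / gyroscope)
    }
    return 1;   // keep the callback registered with the looper
}

static void attachSensorQueue(android::sp<android::SensorEventQueue> &queue,
                              android::sp<android::Looper> &looper)
{
    android::SensorManager &mgr(android::SensorManager::getInstance());
    queue  = mgr.createEventQueue();
    looper = new android::Looper(false);
    looper->addFd(queue->getFd(), 0, ALOOPER_EVENT_INPUT, on_sensor_events, queue.get());
}
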
diff --git a/camera/TICameraParameters.cpp b/camera/TICameraParameters.cpp
index fc3e7be..ed8105e 100644
--- a/camera/TICameraParameters.cpp
+++ b/camera/TICameraParameters.cpp
@@ -22,7 +22,8 @@
#define TI_KEY_ALGO_PREFIX "ti-algo-"
-namespace android {
+namespace Ti {
+namespace Camera {
//TI extensions to camera mode
const char TICameraParameters::HIGH_PERFORMANCE_MODE[] = "high-performance";
@@ -226,5 +227,5 @@ const char TICameraParameters::KEY_ALGO_SHARPENING[] = TI_KEY_ALGO_PREFIX "sharp
const char TICameraParameters::KEY_ALGO_THREELINCOLORMAP[] = TI_KEY_ALGO_PREFIX "threelinecolormap";
const char TICameraParameters::KEY_ALGO_GIC[] = TI_KEY_ALGO_PREFIX "gic";
-};
-
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
index 503d3dd..29c71c7 100644
--- a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
+++ b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -49,7 +49,8 @@ static int mDebugFps = 0;
#define HERE(Msg) {CAMHAL_LOGEB("--=== %s===--\n", Msg);}
-namespace android {
+namespace Ti {
+namespace Camera {
//frames skipped before recalculating the framerate
#define FPS_PERIOD 30
@@ -64,7 +65,7 @@ static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size )
static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height );
static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int width, int height );
-Mutex gV4LAdapterLock;
+android::Mutex gV4LAdapterLock;
char device[15];
@@ -377,7 +378,7 @@ EXIT:
}
-status_t V4LCameraAdapter::setParameters(const CameraParameters &params)
+status_t V4LCameraAdapter::setParameters(const android::CameraParameters &params)
{
status_t ret = NO_ERROR;
int width, height;
@@ -419,7 +420,7 @@ EXIT:
}
-void V4LCameraAdapter::getParameters(CameraParameters& params)
+void V4LCameraAdapter::getParameters(android::CameraParameters& params)
{
LOG_FUNCTION_NAME;
@@ -437,7 +438,7 @@ status_t V4LCameraAdapter::useBuffers(CameraMode mode, CameraBuffer *bufArr, int
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
switch(mode)
{
@@ -530,7 +531,7 @@ status_t V4LCameraAdapter::takePicture() {
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mCaptureBufsLock);
+ android::AutoMutex lock(mCaptureBufsLock);
if(mCapturing) {
CAMHAL_LOGEA("Already Capture in Progress...");
@@ -685,7 +686,7 @@ status_t V4LCameraAdapter::startPreview()
status_t ret = NO_ERROR;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mPreviewBufsLock);
+ android::AutoMutex lock(mPreviewBufsLock);
if(mPreviewing) {
ret = BAD_VALUE;
@@ -729,7 +730,7 @@ status_t V4LCameraAdapter::stopPreview()
int ret = NO_ERROR;
LOG_FUNCTION_NAME;
- Mutex::Autolock lock(mStopPreviewLock);
+ android::AutoMutex lock(mStopPreviewLock);
if(!mPreviewing) {
return NO_INIT;
@@ -1210,7 +1211,7 @@ void detectVideoDevice(char** video_device_list, int& num_device) {
extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index)
{
CameraAdapter *adapter = NULL;
- Mutex::Autolock lock(gV4LAdapterLock);
+ android::AutoMutex lock(gV4LAdapterLock);
LOG_FUNCTION_NAME;
@@ -1309,7 +1310,8 @@ EXIT:
return NO_ERROR;
}
-};
+} // namespace Camera
+} // namespace Ti
/*--------------------Camera Adapter Class ENDS here-----------------------------*/
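
Throughout the patch, `Mutex::Autolock lock(x)` becomes `android::AutoMutex lock(x)`, as in the V4L factory above. `android::AutoMutex` is the typedef the framework declares for `android::Mutex::Autolock` in utils/Mutex.h, so the two spellings are interchangeable; the shorter one simply avoids the nested-name qualification now that the code lives outside namespace android. A trivial sketch with an illustrative function name:

#include <utils/Mutex.h>

static android::Mutex gSampleLock;

static void guardedWork() {
    android::AutoMutex lock(gSampleLock);   // same as android::Mutex::Autolock
    // ... work protected by gSampleLock ...
}
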
diff --git a/camera/V4LCameraAdapter/V4LCapabilities.cpp b/camera/V4LCameraAdapter/V4LCapabilities.cpp
index 338b5e4..3a84268 100644
--- a/camera/V4LCameraAdapter/V4LCapabilities.cpp
+++ b/camera/V4LCameraAdapter/V4LCapabilities.cpp
@@ -26,7 +26,8 @@
#include "ErrorUtils.h"
#include "TICameraParameters.h"
-namespace android {
+namespace Ti {
+namespace Camera {
/************************************
* global constants and variables
@@ -47,13 +48,13 @@ const char V4LCameraAdapter::DEFAULT_PREVIEW_SIZE[] = "640x480";
const char V4LCameraAdapter::DEFAULT_NUM_PREV_BUFS[] = "6";
const char V4LCameraAdapter::DEFAULT_FRAMERATE[] = "30";
const char V4LCameraAdapter::DEFAULT_FOCUS_MODE[] = "infinity";
-const char * V4LCameraAdapter::DEFAULT_VSTAB = CameraParameters::FALSE;
-const char * V4LCameraAdapter::DEFAULT_VNF = CameraParameters::FALSE;
+const char * V4LCameraAdapter::DEFAULT_VSTAB = android::CameraParameters::FALSE;
+const char * V4LCameraAdapter::DEFAULT_VNF = android::CameraParameters::FALSE;
const CapPixelformat V4LCameraAdapter::mPixelformats [] = {
- { V4L2_PIX_FMT_YUYV, CameraParameters::PIXEL_FORMAT_YUV422I },
- { V4L2_PIX_FMT_JPEG, CameraParameters::PIXEL_FORMAT_JPEG },
+ { V4L2_PIX_FMT_YUYV, android::CameraParameters::PIXEL_FORMAT_YUV422I },
+ { V4L2_PIX_FMT_JPEG, android::CameraParameters::PIXEL_FORMAT_JPEG },
};
/*****************************************
@@ -113,7 +114,7 @@ status_t V4LCameraAdapter::insertPreviewFormats(CameraProperties::Properties* pa
}
}
}
- strncat(supported, CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
+ strncat(supported, android::CameraParameters::PIXEL_FORMAT_YUV420P, MAX_PROP_VALUE_LENGTH - 1);
params->set(CameraProperties::SUPPORTED_PREVIEW_FORMATS, supported);
return NO_ERROR;
}
@@ -341,4 +342,5 @@ status_t V4LCameraAdapter::getCaps(const int sensorId, CameraProperties::Propert
-};
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/inc/ANativeWindowDisplayAdapter.h b/camera/inc/ANativeWindowDisplayAdapter.h
index 29379c7..560e98d 100644
--- a/camera/inc/ANativeWindowDisplayAdapter.h
+++ b/camera/inc/ANativeWindowDisplayAdapter.h
@@ -20,7 +20,8 @@
#include <ui/GraphicBufferMapper.h>
#include <hal_public.h>
-namespace android {
+namespace Ti {
+namespace Camera {
/**
* Display handler class - This class basically handles the buffer posting to display
@@ -103,17 +104,17 @@ public:
static const int DISPLAY_TIMEOUT;
static const int FAILED_DQS_TO_SUSPEND;
- class DisplayThread : public Thread
+ class DisplayThread : public android::Thread
{
ANativeWindowDisplayAdapter* mDisplayAdapter;
- TIUTILS::MessageQueue mDisplayThreadQ;
+ Utils::MessageQueue mDisplayThreadQ;
public:
DisplayThread(ANativeWindowDisplayAdapter* da)
: Thread(false), mDisplayAdapter(da) { }
///Returns a reference to the display message Q for display adapter to post messages
- TIUTILS::MessageQueue& msgQ()
+ Utils::MessageQueue& msgQ()
{
return mDisplayThreadQ;
}
@@ -145,12 +146,12 @@ private:
int mFailedDQs;
bool mPaused; //Pause state
preview_stream_ops_t* mANativeWindow;
- sp<DisplayThread> mDisplayThread;
+ android::sp<DisplayThread> mDisplayThread;
FrameProvider *mFrameProvider; ///Pointer to the frame provider interface
- TIUTILS::MessageQueue mDisplayQ;
+ Utils::MessageQueue mDisplayQ;
unsigned int mDisplayState;
///@todo Have a common class for these members
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
bool mDisplayEnabled;
int mBufferCount;
CameraBuffer *mBuffers;
@@ -158,9 +159,9 @@ private:
//IMG_native_handle_t** mGrallocHandleMap; // -> frames[i].GrallocHandle
uint32_t* mOffsetsMap; // -> frames[i].Offset
int mFD;
- KeyedVector<buffer_handle_t *, int> mFramesWithCameraAdapterMap;
- KeyedVector<int, int> mFramesType;
- sp<ErrorNotifier> mErrorNotifier;
+ android::KeyedVector<buffer_handle_t *, int> mFramesWithCameraAdapterMap;
+ android::KeyedVector<int, int> mFramesType;
+ android::sp<ErrorNotifier> mErrorNotifier;
uint32_t mFrameWidth;
uint32_t mFrameHeight;
@@ -184,5 +185,5 @@ private:
};
-};
-
+} // namespace Camera
+} // namespace Ti
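
For reference, a minimal sketch (not part of the patch) of the Utils::MessageQueue hand-off that DisplayThread::msgQ() exposes above; the command value and loop body are illustrative.

#include "MessageQueue.h"

static void pumpOneMessage(Ti::Utils::MessageQueue &queue) {
    Ti::Utils::Message msg;
    queue.get(&msg);              // blocks until another thread put()s a message
    switch (msg.command) {
    case 0:                       // illustrative command code
    default:
        break;
    }
}
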
diff --git a/camera/inc/BaseCameraAdapter.h b/camera/inc/BaseCameraAdapter.h
index 90c7304..4d7a09e 100644
--- a/camera/inc/BaseCameraAdapter.h
+++ b/camera/inc/BaseCameraAdapter.h
@@ -21,7 +21,8 @@
#include "CameraHal.h"
-namespace android {
+namespace Ti {
+namespace Camera {
struct LUT {
const char * userDefinition;
@@ -54,8 +55,8 @@ public:
virtual void removeFramePointers();
//APIs to configure Camera adapter and get the current parameter set
- virtual status_t setParameters(const CameraParameters& params) = 0;
- virtual void getParameters(CameraParameters& params) = 0;
+ virtual status_t setParameters(const android::CameraParameters& params) = 0;
+ virtual void getParameters(android::CameraParameters& params) = 0;
//API to send a command to the camera
virtual status_t sendCommand(CameraCommands operation, int value1 = 0, int value2 = 0, int value3 = 0, int value4 = 0 );
@@ -164,7 +165,7 @@ protected:
status_t notifyFocusSubscribers(CameraHalEvent::FocusStatus status);
status_t notifyShutterSubscribers();
status_t notifyZoomSubscribers(int zoomIdx, bool targetReached);
- status_t notifyMetadataSubscribers(sp<CameraMetadataResult> &meta);
+ status_t notifyMetadataSubscribers(android::sp<CameraMetadataResult> &meta);
//Send the frame to subscribers
status_t sendFrameToSubscribers(CameraFrame *frame);
@@ -181,7 +182,7 @@ protected:
// private member functions
private:
status_t __sendFrameToSubscribers(CameraFrame* frame,
- KeyedVector<int, frame_callback> *subscribers,
+ android::KeyedVector<int, frame_callback> *subscribers,
CameraFrame::FrameType frameType);
status_t rollbackToPreviousState();
@@ -215,66 +216,66 @@ protected:
#endif
- mutable Mutex mReturnFrameLock;
+ mutable android::Mutex mReturnFrameLock;
//Lock protecting the Adapter state
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
AdapterState mAdapterState;
AdapterState mNextState;
//Different frame subscribers get stored using these
- KeyedVector<int, frame_callback> mFrameSubscribers;
- KeyedVector<int, frame_callback> mSnapshotSubscribers;
- KeyedVector<int, frame_callback> mFrameDataSubscribers;
- KeyedVector<int, frame_callback> mVideoSubscribers;
- KeyedVector<int, frame_callback> mVideoInSubscribers;
- KeyedVector<int, frame_callback> mImageSubscribers;
- KeyedVector<int, frame_callback> mRawSubscribers;
- KeyedVector<int, event_callback> mFocusSubscribers;
- KeyedVector<int, event_callback> mZoomSubscribers;
- KeyedVector<int, event_callback> mShutterSubscribers;
- KeyedVector<int, event_callback> mMetadataSubscribers;
+ android::KeyedVector<int, frame_callback> mFrameSubscribers;
+ android::KeyedVector<int, frame_callback> mSnapshotSubscribers;
+ android::KeyedVector<int, frame_callback> mFrameDataSubscribers;
+ android::KeyedVector<int, frame_callback> mVideoSubscribers;
+ android::KeyedVector<int, frame_callback> mVideoInSubscribers;
+ android::KeyedVector<int, frame_callback> mImageSubscribers;
+ android::KeyedVector<int, frame_callback> mRawSubscribers;
+ android::KeyedVector<int, event_callback> mFocusSubscribers;
+ android::KeyedVector<int, event_callback> mZoomSubscribers;
+ android::KeyedVector<int, event_callback> mShutterSubscribers;
+ android::KeyedVector<int, event_callback> mMetadataSubscribers;
//Preview buffer management data
CameraBuffer *mPreviewBuffers;
int mPreviewBufferCount;
size_t mPreviewBuffersLength;
- KeyedVector<CameraBuffer *, int> mPreviewBuffersAvailable;
- mutable Mutex mPreviewBufferLock;
+ android::KeyedVector<CameraBuffer *, int> mPreviewBuffersAvailable;
+ mutable android::Mutex mPreviewBufferLock;
//Snapshot buffer management data
- KeyedVector<int, int> mSnapshotBuffersAvailable;
- mutable Mutex mSnapshotBufferLock;
+ android::KeyedVector<int, int> mSnapshotBuffersAvailable;
+ mutable android::Mutex mSnapshotBufferLock;
//Video buffer management data
CameraBuffer *mVideoBuffers;
- KeyedVector<CameraBuffer *, int> mVideoBuffersAvailable;
+ android::KeyedVector<CameraBuffer *, int> mVideoBuffersAvailable;
int mVideoBuffersCount;
size_t mVideoBuffersLength;
- mutable Mutex mVideoBufferLock;
+ mutable android::Mutex mVideoBufferLock;
//Image buffer management data
CameraBuffer *mCaptureBuffers;
- KeyedVector<CameraBuffer *, int> mCaptureBuffersAvailable;
+ android::KeyedVector<CameraBuffer *, int> mCaptureBuffersAvailable;
int mCaptureBuffersCount;
size_t mCaptureBuffersLength;
- mutable Mutex mCaptureBufferLock;
+ mutable android::Mutex mCaptureBufferLock;
//Metadata buffermanagement
CameraBuffer *mPreviewDataBuffers;
- KeyedVector<CameraBuffer *, int> mPreviewDataBuffersAvailable;
+ android::KeyedVector<CameraBuffer *, int> mPreviewDataBuffersAvailable;
int mPreviewDataBuffersCount;
size_t mPreviewDataBuffersLength;
- mutable Mutex mPreviewDataBufferLock;
+ mutable android::Mutex mPreviewDataBufferLock;
//Video input buffer management data (used for reproc pipe)
CameraBuffer *mVideoInBuffers;
- KeyedVector<CameraBuffer *, int> mVideoInBuffersAvailable;
- mutable Mutex mVideoInBufferLock;
+ android::KeyedVector<CameraBuffer *, int> mVideoInBuffersAvailable;
+ mutable android::Mutex mVideoInBufferLock;
- TIUTILS::MessageQueue mFrameQ;
- TIUTILS::MessageQueue mAdapterQ;
- mutable Mutex mSubscriberLock;
+ Utils::MessageQueue mFrameQ;
+ Utils::MessageQueue mAdapterQ;
+ mutable android::Mutex mSubscriberLock;
ErrorNotifier *mErrorNotifier;
release_image_buffers_callback mReleaseImageBuffersCallback;
end_image_capture_callback mEndImageCaptureCallback;
@@ -287,13 +288,15 @@ protected:
uint32_t mFramesWithEncoder;
#ifdef CAMERAHAL_DEBUG
- KeyedVector<int, bool> mBuffersWithDucati;
+ android::KeyedVector<int, bool> mBuffersWithDucati;
#endif
- KeyedVector<void *, CameraFrame *> mFrameQueue;
+ android::KeyedVector<void *, CameraFrame *> mFrameQueue;
};
-}
+} // namespace Camera
+} // namespace Ti
+
#endif //BASE_CAMERA_ADAPTER_H
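
A minimal sketch (not part of the patch) of the android::KeyedVector subscriber bookkeeping that the members above now spell with an explicit namespace; the callback type and class are illustrative.

#include <utils/KeyedVector.h>

typedef void (*frame_cb)(void *frame, void *cookie);

class SampleSubscribers {
public:
    void add(int msgType, frame_cb cb)  { mSubscribers.add(msgType, cb); }
    void remove(int msgType)            { mSubscribers.removeItem(msgType); }
    void notifyAll(void *frame, void *cookie) {
        for (size_t i = 0; i < mSubscribers.size(); i++) {
            mSubscribers.valueAt(i)(frame, cookie);
        }
    }
private:
    android::KeyedVector<int, frame_cb> mSubscribers;
};
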
diff --git a/camera/inc/BufferSourceAdapter.h b/camera/inc/BufferSourceAdapter.h
index 2973d7e..1514dc5 100644
--- a/camera/inc/BufferSourceAdapter.h
+++ b/camera/inc/BufferSourceAdapter.h
@@ -23,7 +23,8 @@
#include <ui/GraphicBufferMapper.h>
#include <hal_public.h>
-namespace android {
+namespace Ti {
+namespace Camera {
/**
* Handles enqueueing/dequeing buffers to tap-in/tap-out points
@@ -37,7 +38,7 @@ class BufferSourceAdapter : public DisplayAdapter
// private types
private:
// helper class to return frame in different thread context
- class ReturnFrame : public Thread {
+ class ReturnFrame : public android::Thread {
public:
ReturnFrame(BufferSourceAdapter* __this) : mBufferSourceAdapter(__this) {
mWaitForSignal.Create(0);
@@ -61,12 +62,12 @@ private:
private:
BufferSourceAdapter* mBufferSourceAdapter;
- Semaphore mWaitForSignal;
+ Utils::Semaphore mWaitForSignal;
bool mDestroying;
};
// helper class to queue frame in different thread context
- class QueueFrame : public Thread {
+ class QueueFrame : public android::Thread {
public:
QueueFrame(BufferSourceAdapter* __this) : mBufferSourceAdapter(__this) {
mDestroying = false;
@@ -75,7 +76,7 @@ private:
~QueueFrame() {
mDestroying = true;
- Mutex::Autolock lock(mFramesMutex);
+ android::AutoMutex lock(mFramesMutex);
while (!mFrames.empty()) {
CameraFrame *frame = mFrames.itemAt(0);
mFrames.removeAt(0);
@@ -85,7 +86,7 @@ private:
}
void addFrame(CameraFrame *frame) {
- Mutex::Autolock lock(mFramesMutex);
+ android::AutoMutex lock(mFramesMutex);
mFrames.add(new CameraFrame(*frame));
mFramesCondition.signal();
}
@@ -93,7 +94,7 @@ private:
virtual bool threadLoop() {
CameraFrame *frame = NULL;
{
- Mutex::Autolock lock(mFramesMutex);
+ android::AutoMutex lock(mFramesMutex);
while (mFrames.empty() && !mDestroying) mFramesCondition.wait(mFramesMutex);
if (!mDestroying) {
frame = mFrames.itemAt(0);
@@ -111,9 +112,9 @@ private:
private:
BufferSourceAdapter* mBufferSourceAdapter;
- Vector<CameraFrame *> mFrames;
- Condition mFramesCondition;
- Mutex mFramesMutex;
+ android::Vector<CameraFrame *> mFrames;
+ android::Condition mFramesCondition;
+ android::Mutex mFramesMutex;
bool mDestroying;
};
@@ -160,14 +161,14 @@ private:
preview_stream_ops_t* mBufferSource;
FrameProvider *mFrameProvider; // Pointer to the frame provider interface
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
int mBufferCount;
CameraBuffer *mBuffers;
- KeyedVector<buffer_handle_t *, int> mFramesWithCameraAdapterMap;
- sp<ErrorNotifier> mErrorNotifier;
- sp<ReturnFrame> mReturnFrame;
- sp<QueueFrame> mQueueFrame;
+ android::KeyedVector<buffer_handle_t *, int> mFramesWithCameraAdapterMap;
+ android::sp<ErrorNotifier> mErrorNotifier;
+ android::sp<ReturnFrame> mReturnFrame;
+ android::sp<QueueFrame> mQueueFrame;
uint32_t mFrameWidth;
uint32_t mFrameHeight;
@@ -179,7 +180,8 @@ private:
const char *mPixelFormat;
};
-};
+} // namespace Camera
+} // namespace Ti
#endif
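
A minimal sketch (not part of the patch) of the mutex/condition hand-off that QueueFrame above relies on, reduced to a generic work queue; the item type and class name are illustrative.

#include <utils/Vector.h>
#include <utils/Mutex.h>
#include <utils/Condition.h>

class SampleQueue {
public:
    void push(int *item) {
        android::AutoMutex lock(mMutex);
        mItems.add(item);
        mCondition.signal();                 // wake one waiting consumer
    }
    int *pop() {                             // blocks until an item is available
        android::AutoMutex lock(mMutex);
        while (mItems.isEmpty()) {
            mCondition.wait(mMutex);         // releases mMutex while waiting
        }
        int *item = mItems.itemAt(0);
        mItems.removeAt(0);
        return item;
    }
private:
    android::Vector<int *> mItems;
    android::Condition mCondition;
    android::Mutex mMutex;
};
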
diff --git a/camera/inc/CameraHal.h b/camera/inc/CameraHal.h
index 44b2fbb..965f1f3 100644
--- a/camera/inc/CameraHal.h
+++ b/camera/inc/CameraHal.h
@@ -100,7 +100,8 @@
#define CAMHAL_SIZE_OF_ARRAY(x) static_cast<int>(sizeof(x)/sizeof(x[0]))
-namespace android {
+namespace Ti {
+namespace Camera {
#ifdef CAMERAHAL_USE_RAW_IMAGE_SAVING
extern const char * const kRawImagesOutputDirPath;
@@ -178,7 +179,7 @@ inline int FpsRange::min() const { return mMin; }
inline int FpsRange::max() const { return mMax; }
-class CameraArea : public RefBase
+class CameraArea : public android::RefBase
{
public:
@@ -215,11 +216,11 @@ public:
return mWeight;
}
- bool compare(const sp<CameraArea> &area);
+ bool compare(const android::sp<CameraArea> &area);
static status_t parseAreas(const char *area,
size_t areaLength,
- Vector< sp<CameraArea> > &areas);
+ android::Vector< android::sp<CameraArea> > &areas);
static status_t checkArea(ssize_t top,
ssize_t left,
@@ -227,7 +228,7 @@ public:
ssize_t right,
ssize_t weight);
- static bool areAreasDifferent(Vector< sp<CameraArea> > &, Vector< sp<CameraArea> > &);
+ static bool areAreasDifferent(android::Vector< android::sp<CameraArea> > &, android::Vector< android::sp<CameraArea> > &);
protected:
static const ssize_t TOP = -1000;
@@ -244,7 +245,7 @@ protected:
size_t mWeight;
};
-class CameraMetadataResult : public RefBase
+class CameraMetadataResult : public android::RefBase
{
public:
@@ -386,7 +387,7 @@ class CameraFrame
unsigned int mQuirks;
unsigned int mYuv[2];
#ifdef OMAP_ENHANCEMENT_CPCAM
- CameraMetadata mMetaData;
+ android::CameraMetadata mMetaData;
#endif
///@todo add other member vars like stride etc
};
@@ -450,9 +451,9 @@ public:
size_t score;
} FaceData;
- typedef sp<CameraMetadataResult> MetaEventData;
+ typedef android::sp<CameraMetadataResult> MetaEventData;
- class CameraHalEventData : public RefBase{
+ class CameraHalEventData : public android::RefBase{
public:
@@ -475,7 +476,7 @@ public:
void* mCookie;
CameraHalEventType mEventType;
- sp<CameraHalEventData> mEventData;
+ android::sp<CameraHalEventData> mEventData;
};
@@ -507,7 +508,7 @@ public:
virtual ~MessageNotifier() {};
};
-class ErrorNotifier : public virtual RefBase
+class ErrorNotifier : public virtual android::RefBase
{
public:
virtual void errorNotify(int error) = 0;
@@ -592,7 +593,7 @@ public:
/**
* Class for handling data and notify callbacks to application
*/
-class AppCallbackNotifier: public ErrorNotifier , public virtual RefBase
+class AppCallbackNotifier: public ErrorNotifier , public virtual android::RefBase
{
public:
@@ -634,7 +635,7 @@ public:
//All sub-components of Camera HAL call this whenever any error happens
virtual void errorNotify(int error);
- status_t startPreviewCallbacks(CameraParameters &params, CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count);
+ status_t startPreviewCallbacks(android::CameraParameters &params, CameraBuffer *buffers, uint32_t *offsets, int fd, size_t length, size_t count);
status_t stopPreviewCallbacks();
status_t enableMsgType(int32_t msgType);
@@ -681,9 +682,9 @@ public:
void flushEventQueue();
//Internal class definitions
- class NotificationThread : public Thread {
+ class NotificationThread : public android::Thread {
AppCallbackNotifier* mAppCallbackNotifier;
- TIUTILS::MessageQueue mNotificationThreadQ;
+ Utils::MessageQueue mNotificationThreadQ;
public:
enum NotificationThreadCommands
{
@@ -698,7 +699,7 @@ public:
return mAppCallbackNotifier->notificationThread();
}
- TIUTILS::MessageQueue &msgQ() { return mNotificationThreadQ;}
+ Utils::MessageQueue &msgQ() { return mNotificationThreadQ;}
};
//Friend declarations
@@ -716,8 +717,8 @@ private:
const char* getContstantForPixelFormat(const char *pixelFormat);
private:
- mutable Mutex mLock;
- mutable Mutex mBurstLock;
+ mutable android::Mutex mLock;
+ mutable android::Mutex mBurstLock;
CameraHal* mCameraHal;
camera_notify_callback mNotifyCb;
camera_data_callback mDataCb;
@@ -727,21 +728,21 @@ private:
//Keeps Video MemoryHeaps and Buffers within
//these objects
- KeyedVector<unsigned int, unsigned int> mVideoHeaps;
- KeyedVector<unsigned int, unsigned int> mVideoBuffers;
- KeyedVector<void *, CameraBuffer *> mVideoMap;
+ android::KeyedVector<unsigned int, unsigned int> mVideoHeaps;
+ android::KeyedVector<unsigned int, unsigned int> mVideoBuffers;
+ android::KeyedVector<void *, CameraBuffer *> mVideoMap;
//Keeps list of Gralloc handles and associated Video Metadata Buffers
- KeyedVector<void *, camera_memory_t *> mVideoMetadataBufferMemoryMap;
- KeyedVector<void *, CameraBuffer *> mVideoMetadataBufferReverseMap;
+ android::KeyedVector<void *, camera_memory_t *> mVideoMetadataBufferMemoryMap;
+ android::KeyedVector<void *, CameraBuffer *> mVideoMetadataBufferReverseMap;
bool mBufferReleased;
- sp< NotificationThread> mNotificationThread;
+ android::sp< NotificationThread> mNotificationThread;
EventProvider *mEventProvider;
FrameProvider *mFrameProvider;
- TIUTILS::MessageQueue mEventQ;
- TIUTILS::MessageQueue mFrameQ;
+ Utils::MessageQueue mEventQ;
+ Utils::MessageQueue mFrameQ;
NotifierState mNotifierState;
bool mPreviewing;
@@ -752,12 +753,12 @@ private:
int mPreviewHeight;
int mPreviewStride;
const char *mPreviewPixelFormat;
- KeyedVector<unsigned int, sp<MemoryHeapBase> > mSharedPreviewHeaps;
- KeyedVector<unsigned int, sp<MemoryBase> > mSharedPreviewBuffers;
+ android::KeyedVector<unsigned int, android::sp<android::MemoryHeapBase> > mSharedPreviewHeaps;
+ android::KeyedVector<unsigned int, android::sp<android::MemoryBase> > mSharedPreviewBuffers;
//Burst mode active
bool mBurst;
- mutable Mutex mRecordingLock;
+ mutable android::Mutex mRecordingLock;
bool mRecording;
bool mMeasurementEnabled;
@@ -775,7 +776,7 @@ private:
/**
* Class used for allocating memory for JPEG bit stream buffers, output buffers of camera in no overlay case
*/
-class MemoryManager : public BufferProvider, public virtual RefBase
+class MemoryManager : public BufferProvider, public virtual android::RefBase
{
public:
MemoryManager();
@@ -791,7 +792,7 @@ public:
virtual int freeBufferList(CameraBuffer * buflist);
private:
- sp<ErrorNotifier> mErrorNotifier;
+ android::sp<ErrorNotifier> mErrorNotifier;
int mIonFd;
};
@@ -803,7 +804,7 @@ private:
* Concrete classes derive from this class and provide implementations based on the specific camera h/w interface
*/
-class CameraAdapter: public FrameNotifier, public virtual RefBase
+class CameraAdapter: public FrameNotifier, public virtual android::RefBase
{
protected:
enum AdapterActiveStates {
@@ -918,8 +919,8 @@ public:
virtual void removeFramePointers() = 0;
//APIs to configure Camera adapter and get the current parameter set
- virtual int setParameters(const CameraParameters& params) = 0;
- virtual void getParameters(CameraParameters& params) = 0;
+ virtual int setParameters(const android::CameraParameters& params) = 0;
+ virtual void getParameters(android::CameraParameters& params) = 0;
//Registers callback for returning image buffers back to CameraHAL
virtual int registerImageReleaseCallback(release_image_buffers_callback callback, void *user_data) = 0;
@@ -959,7 +960,7 @@ protected:
virtual status_t rollbackState() = 0;
};
-class DisplayAdapter : public BufferProvider, public virtual RefBase
+class DisplayAdapter : public BufferProvider, public virtual android::RefBase
{
public:
///Initializes the display adapter creates any resources required
@@ -1136,7 +1137,7 @@ public:
/** Set the camera parameters. */
int setParameters(const char* params);
- int setParameters(const CameraParameters& params);
+ int setParameters(const android::CameraParameters& params);
/** Return the camera parameters. */
char* getParameters();
@@ -1221,7 +1222,7 @@ private:
//@{
/** Set the camera parameters specific to Video Recording. */
- bool setVideoModeParameters(const CameraParameters&);
+ bool setVideoModeParameters(const android::CameraParameters&);
/** Reset the camera parameters specific to Video Recording. */
bool resetVideoModeParameters();
@@ -1290,7 +1291,7 @@ private:
void forceStopPreview();
void getPreferredPreviewRes(int *width, int *height);
- void resetPreviewRes(CameraParameters *params);
+ void resetPreviewRes(android::CameraParameters *params);
// Internal __takePicture function - used in public takePicture() and reprocess()
int __takePicture(const char* params);
@@ -1314,15 +1315,15 @@ public:
static const char PARAMS_DELIMITER[];
CameraAdapter *mCameraAdapter;
- sp<AppCallbackNotifier> mAppCallbackNotifier;
- sp<DisplayAdapter> mDisplayAdapter;
- sp<MemoryManager> mMemoryManager;
+ android::sp<AppCallbackNotifier> mAppCallbackNotifier;
+ android::sp<DisplayAdapter> mDisplayAdapter;
+ android::sp<MemoryManager> mMemoryManager;
// TODO(XXX): May need to keep this as a vector in the future
// when we can have multiple tap-in/tap-out points
- sp<DisplayAdapter> mBufferSourceAdapter_In;
- sp<DisplayAdapter> mBufferSourceAdapter_Out;
+ android::sp<DisplayAdapter> mBufferSourceAdapter_In;
+ android::sp<DisplayAdapter> mBufferSourceAdapter_Out;
- sp<IMemoryHeap> mPictureHeap;
+ android::sp<android::IMemoryHeap> mPictureHeap;
int* mGrallocHandles;
bool mFpsRangeChangedByApp;
@@ -1364,13 +1365,13 @@ private:
//Index of current camera adapter
int mCameraIndex;
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
- sp<SensorListener> mSensorListener;
+ android::sp<SensorListener> mSensorListener;
void* mCameraAdapterHandle;
- CameraParameters mParameters;
+ android::CameraParameters mParameters;
bool mPreviewRunning;
bool mPreviewStateOld;
bool mRecordingEnabled;
@@ -1415,9 +1416,10 @@ private:
int mVideoWidth;
int mVideoHeight;
- String8 mCapModeBackup;
+ android::String8 mCapModeBackup;
};
-}; // namespace android
+} // namespace Camera
+} // namespace Ti
#endif
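
A minimal sketch (not part of the patch) of the ownership pattern implied by deriving from android::RefBase, which is why CameraHal.h above holds its adapters and notifiers through android::sp<>; the type here is illustrative.

#include <utils/RefBase.h>
#include <utils/StrongPointer.h>

class SampleAdapter : public android::RefBase {
public:
    void doWork() { /* ... */ }
};

static void useAdapter() {
    android::sp<SampleAdapter> adapter = new SampleAdapter();  // refcounted by sp<>
    adapter->doWork();
}                                                              // released when the sp<> goes out of scope
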
diff --git a/camera/inc/CameraProperties.h b/camera/inc/CameraProperties.h
index 4763b10..bfc6012 100644
--- a/camera/inc/CameraProperties.h
+++ b/camera/inc/CameraProperties.h
@@ -32,7 +32,8 @@
#include "Common.h"
-namespace android {
+namespace Ti {
+namespace Camera {
#define MAX_CAMERAS_SUPPORTED 3
#define MAX_SIMUL_CAMERAS_SUPPORTED 1
@@ -214,7 +215,7 @@ public:
private:
OperatingMode mCurrentMode;
- DefaultKeyedVector<String8, String8> mProperties[MODE_MAX];
+ android::DefaultKeyedVector<android::String8, android::String8> mProperties[MODE_MAX];
};
@@ -228,12 +229,13 @@ private:
int mCamerasSupported;
int mInitialized;
- mutable Mutex mLock;
+ mutable android::Mutex mLock;
Properties mCameraProps[MAX_CAMERAS_SUPPORTED];
};
-};
+} // namespace Camera
+} // namespace Ti
#endif //CAMERA_PROPERTIES_H
diff --git a/camera/inc/Common.h b/camera/inc/Common.h
index 232dbbf..b369e65 100644
--- a/camera/inc/Common.h
+++ b/camera/inc/Common.h
@@ -19,6 +19,7 @@
#include "UtilsCommon.h"
#include "DebugUtils.h"
+#include "Status.h"
diff --git a/camera/inc/Encoder_libjpeg.h b/camera/inc/Encoder_libjpeg.h
index 7a8e06a..8061e48 100644
--- a/camera/inc/Encoder_libjpeg.h
+++ b/camera/inc/Encoder_libjpeg.h
@@ -39,7 +39,9 @@ extern "C" {
#define CANCEL_TIMEOUT 5000000 // 5 seconds
-namespace android {
+namespace Ti {
+namespace Camera {
+
/**
* libjpeg encoder class - uses libjpeg to encode yuv
*/
@@ -111,7 +113,7 @@ class ExifElementsTable {
bool jpeg_opened;
};
-class Encoder_libjpeg : public Thread {
+class Encoder_libjpeg : public android::Thread {
/* public member types and variables */
public:
struct params {
@@ -138,7 +140,7 @@ class Encoder_libjpeg : public Thread {
void* cookie1,
void* cookie2,
void* cookie3, void *cookie4)
- : Thread(false), mMainInput(main_jpeg), mThumbnailInput(tn_jpeg), mCb(cb),
+ : android::Thread(false), mMainInput(main_jpeg), mThumbnailInput(tn_jpeg), mCb(cb),
mCancelEncoding(false), mCookie1(cookie1), mCookie2(cookie2), mCookie3(cookie3), mCookie4(cookie4),
mType(type), mThumb(NULL) {
this->incStrong(this);
@@ -151,7 +153,6 @@ class Encoder_libjpeg : public Thread {
virtual bool threadLoop() {
size_t size = 0;
- sp<Encoder_libjpeg> tn = NULL;
if (mThumbnailInput) {
// start thread to encode thumbnail
mThumb = new Encoder_libjpeg(mThumbnailInput, NULL, NULL, mType, NULL, NULL, NULL, NULL);
@@ -205,12 +206,13 @@ class Encoder_libjpeg : public Thread {
void* mCookie3;
void* mCookie4;
CameraFrame::FrameType mType;
- sp<Encoder_libjpeg> mThumb;
- Semaphore mCancelSem;
+ android::sp<Encoder_libjpeg> mThumb;
+ Utils::Semaphore mCancelSem;
size_t encode(params*);
};
-}
+} // namespace Camera
+} // namespace Ti
#endif
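
A minimal sketch (not part of the patch) of the android::Thread worker pattern that Encoder_libjpeg, CommandHandler and the other helper threads in this patch follow; the class name and work body are illustrative.

#include <utils/Thread.h>
#include <utils/StrongPointer.h>

class SampleWorker : public android::Thread {
public:
    SampleWorker() : android::Thread(false) { }   // false: thread never calls into Java
    virtual bool threadLoop() {
        // ... perform one unit of work ...
        return true;                               // true = run threadLoop() again
    }
};

// Usage:
//   android::sp<SampleWorker> worker = new SampleWorker();
//   worker->run("SampleWorker");
//   ...
//   worker->requestExitAndWait();
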
diff --git a/camera/inc/General3A_Settings.h b/camera/inc/General3A_Settings.h
index 452f412..777b255 100644
--- a/camera/inc/General3A_Settings.h
+++ b/camera/inc/General3A_Settings.h
@@ -29,7 +29,8 @@
#ifndef GENERAL_3A_SETTINGS_H
#define GENERAL_3A_SETTINGS_H
-namespace android {
+namespace Ti {
+namespace Camera {
struct userToOMX_LUT{
const char * userDefinition;
@@ -53,15 +54,15 @@ const userToOMX_LUT isoUserToOMX[] = {
};
const userToOMX_LUT effects_UserToOMX [] = {
- { CameraParameters::EFFECT_NONE, OMX_ImageFilterNone },
- { CameraParameters::EFFECT_NEGATIVE, OMX_ImageFilterNegative },
- { CameraParameters::EFFECT_SOLARIZE, OMX_ImageFilterSolarize },
- { CameraParameters::EFFECT_SEPIA, OMX_ImageFilterSepia },
- { CameraParameters::EFFECT_MONO, OMX_ImageFilterGrayScale },
- { CameraParameters::EFFECT_BLACKBOARD, OMX_TI_ImageFilterBlackBoard },
- { CameraParameters::EFFECT_WHITEBOARD, OMX_TI_ImageFilterWhiteBoard },
- { CameraParameters::EFFECT_AQUA, OMX_TI_ImageFilterAqua },
- { CameraParameters::EFFECT_POSTERIZE, OMX_TI_ImageFilterPosterize },
+ { android::CameraParameters::EFFECT_NONE, OMX_ImageFilterNone },
+ { android::CameraParameters::EFFECT_NEGATIVE, OMX_ImageFilterNegative },
+ { android::CameraParameters::EFFECT_SOLARIZE, OMX_ImageFilterSolarize },
+ { android::CameraParameters::EFFECT_SEPIA, OMX_ImageFilterSepia },
+ { android::CameraParameters::EFFECT_MONO, OMX_ImageFilterGrayScale },
+ { android::CameraParameters::EFFECT_BLACKBOARD, OMX_TI_ImageFilterBlackBoard },
+ { android::CameraParameters::EFFECT_WHITEBOARD, OMX_TI_ImageFilterWhiteBoard },
+ { android::CameraParameters::EFFECT_AQUA, OMX_TI_ImageFilterAqua },
+ { android::CameraParameters::EFFECT_POSTERIZE, OMX_TI_ImageFilterPosterize },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::EFFECT_NATURAL, OMX_ImageFilterNatural },
{ TICameraParameters::EFFECT_VIVID, OMX_ImageFilterVivid },
@@ -71,21 +72,21 @@ const userToOMX_LUT effects_UserToOMX [] = {
};
const userToOMX_LUT scene_UserToOMX [] = {
- { CameraParameters::SCENE_MODE_AUTO, OMX_Manual },
- { CameraParameters::SCENE_MODE_LANDSCAPE, OMX_Landscape },
- { CameraParameters::SCENE_MODE_NIGHT_PORTRAIT, OMX_NightPortrait },
- { CameraParameters::SCENE_MODE_FIREWORKS, OMX_Fireworks },
- { CameraParameters::SCENE_MODE_ACTION, OMX_TI_Action },
- { CameraParameters::SCENE_MODE_BEACH, OMX_TI_Beach },
- { CameraParameters::SCENE_MODE_CANDLELIGHT, OMX_TI_Candlelight },
- { CameraParameters::SCENE_MODE_NIGHT, OMX_TI_Night },
- { CameraParameters::SCENE_MODE_PARTY, OMX_TI_Party },
- { CameraParameters::SCENE_MODE_PORTRAIT, OMX_TI_Portrait },
- { CameraParameters::SCENE_MODE_SNOW, OMX_TI_Snow },
- { CameraParameters::SCENE_MODE_STEADYPHOTO, OMX_TI_Steadyphoto },
- { CameraParameters::SCENE_MODE_SUNSET, OMX_TI_Sunset },
- { CameraParameters::SCENE_MODE_THEATRE, OMX_TI_Theatre },
- { CameraParameters::SCENE_MODE_SPORTS, OMX_Sport },
+ { android::CameraParameters::SCENE_MODE_AUTO, OMX_Manual },
+ { android::CameraParameters::SCENE_MODE_LANDSCAPE, OMX_Landscape },
+ { android::CameraParameters::SCENE_MODE_NIGHT_PORTRAIT, OMX_NightPortrait },
+ { android::CameraParameters::SCENE_MODE_FIREWORKS, OMX_Fireworks },
+ { android::CameraParameters::SCENE_MODE_ACTION, OMX_TI_Action },
+ { android::CameraParameters::SCENE_MODE_BEACH, OMX_TI_Beach },
+ { android::CameraParameters::SCENE_MODE_CANDLELIGHT, OMX_TI_Candlelight },
+ { android::CameraParameters::SCENE_MODE_NIGHT, OMX_TI_Night },
+ { android::CameraParameters::SCENE_MODE_PARTY, OMX_TI_Party },
+ { android::CameraParameters::SCENE_MODE_PORTRAIT, OMX_TI_Portrait },
+ { android::CameraParameters::SCENE_MODE_SNOW, OMX_TI_Snow },
+ { android::CameraParameters::SCENE_MODE_STEADYPHOTO, OMX_TI_Steadyphoto },
+ { android::CameraParameters::SCENE_MODE_SUNSET, OMX_TI_Sunset },
+ { android::CameraParameters::SCENE_MODE_THEATRE, OMX_TI_Theatre },
+ { android::CameraParameters::SCENE_MODE_SPORTS, OMX_Sport },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::SCENE_MODE_CLOSEUP, OMX_Closeup },
{ TICameraParameters::SCENE_MODE_AQUA, OMX_Underwater },
@@ -100,14 +101,14 @@ const userToOMX_LUT scene_UserToOMX [] = {
};
const userToOMX_LUT whiteBal_UserToOMX [] = {
- { CameraParameters::WHITE_BALANCE_AUTO, OMX_WhiteBalControlAuto },
- { CameraParameters::WHITE_BALANCE_DAYLIGHT, OMX_WhiteBalControlSunLight },
- { CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, OMX_WhiteBalControlCloudy },
- { CameraParameters::WHITE_BALANCE_FLUORESCENT, OMX_WhiteBalControlFluorescent },
- { CameraParameters::WHITE_BALANCE_INCANDESCENT, OMX_WhiteBalControlIncandescent },
- { CameraParameters::WHITE_BALANCE_SHADE, OMX_TI_WhiteBalControlShade },
- { CameraParameters::WHITE_BALANCE_TWILIGHT, OMX_TI_WhiteBalControlTwilight },
- { CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT, OMX_TI_WhiteBalControlWarmFluorescent },
+ { android::CameraParameters::WHITE_BALANCE_AUTO, OMX_WhiteBalControlAuto },
+ { android::CameraParameters::WHITE_BALANCE_DAYLIGHT, OMX_WhiteBalControlSunLight },
+ { android::CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, OMX_WhiteBalControlCloudy },
+ { android::CameraParameters::WHITE_BALANCE_FLUORESCENT, OMX_WhiteBalControlFluorescent },
+ { android::CameraParameters::WHITE_BALANCE_INCANDESCENT, OMX_WhiteBalControlIncandescent },
+ { android::CameraParameters::WHITE_BALANCE_SHADE, OMX_TI_WhiteBalControlShade },
+ { android::CameraParameters::WHITE_BALANCE_TWILIGHT, OMX_TI_WhiteBalControlTwilight },
+ { android::CameraParameters::WHITE_BALANCE_WARM_FLUORESCENT, OMX_TI_WhiteBalControlWarmFluorescent },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::WHITE_BALANCE_TUNGSTEN, OMX_WhiteBalControlTungsten },
{ TICameraParameters::WHITE_BALANCE_HORIZON, OMX_WhiteBalControlHorizon },
@@ -116,19 +117,19 @@ const userToOMX_LUT whiteBal_UserToOMX [] = {
};
const userToOMX_LUT antibanding_UserToOMX [] = {
- { CameraParameters::ANTIBANDING_OFF, OMX_FlickerCancelOff },
- { CameraParameters::ANTIBANDING_AUTO, OMX_FlickerCancelAuto },
- { CameraParameters::ANTIBANDING_50HZ, OMX_FlickerCancel50 },
- { CameraParameters::ANTIBANDING_60HZ, OMX_FlickerCancel60 }
+ { android::CameraParameters::ANTIBANDING_OFF, OMX_FlickerCancelOff },
+ { android::CameraParameters::ANTIBANDING_AUTO, OMX_FlickerCancelAuto },
+ { android::CameraParameters::ANTIBANDING_50HZ, OMX_FlickerCancel50 },
+ { android::CameraParameters::ANTIBANDING_60HZ, OMX_FlickerCancel60 }
};
const userToOMX_LUT focus_UserToOMX [] = {
- { CameraParameters::FOCUS_MODE_AUTO, OMX_IMAGE_FocusControlAutoLock },
- { CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlAutoInfinity },
- { CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlHyperfocal },
- { CameraParameters::FOCUS_MODE_MACRO, OMX_IMAGE_FocusControlAutoMacro },
- { CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, OMX_IMAGE_FocusControlAuto },
- { CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, OMX_IMAGE_FocusControlAuto },
+ { android::CameraParameters::FOCUS_MODE_AUTO, OMX_IMAGE_FocusControlAutoLock },
+ { android::CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlAutoInfinity },
+ { android::CameraParameters::FOCUS_MODE_INFINITY, OMX_IMAGE_FocusControlHyperfocal },
+ { android::CameraParameters::FOCUS_MODE_MACRO, OMX_IMAGE_FocusControlAutoMacro },
+ { android::CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, OMX_IMAGE_FocusControlAuto },
+ { android::CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, OMX_IMAGE_FocusControlAuto },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::FOCUS_MODE_FACE , OMX_IMAGE_FocusControlContinousFacePriority },
{ TICameraParameters::FOCUS_MODE_PORTRAIT, OMX_IMAGE_FocusControlPortrait },
@@ -151,11 +152,11 @@ const userToOMX_LUT exposure_UserToOMX [] = {
};
const userToOMX_LUT flash_UserToOMX [] = {
- { CameraParameters::FLASH_MODE_OFF ,OMX_IMAGE_FlashControlOff },
- { CameraParameters::FLASH_MODE_ON ,OMX_IMAGE_FlashControlOn },
- { CameraParameters::FLASH_MODE_AUTO ,OMX_IMAGE_FlashControlAuto },
- { CameraParameters::FLASH_MODE_TORCH ,OMX_IMAGE_FlashControlTorch },
- { CameraParameters::FLASH_MODE_RED_EYE ,OMX_IMAGE_FlashControlRedEyeReduction },
+ { android::CameraParameters::FLASH_MODE_OFF ,OMX_IMAGE_FlashControlOff },
+ { android::CameraParameters::FLASH_MODE_ON ,OMX_IMAGE_FlashControlOn },
+ { android::CameraParameters::FLASH_MODE_AUTO ,OMX_IMAGE_FlashControlAuto },
+ { android::CameraParameters::FLASH_MODE_TORCH ,OMX_IMAGE_FlashControlTorch },
+ { android::CameraParameters::FLASH_MODE_RED_EYE ,OMX_IMAGE_FlashControlRedEyeReduction },
#ifdef OMAP_ENHANCEMENT
{ TICameraParameters::FLASH_MODE_FILL_IN ,OMX_IMAGE_FlashControlFillin }
#endif
@@ -285,6 +286,7 @@ enum E3ASettingsFlags
E3AsettingsAll = ( ((E3aSettingMax -1 ) << 1) -1 ) /// all possible flags raised
};
-};
+} // namespace Camera
+} // namespace Ti
#endif //GENERAL_3A_SETTINGS_H
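
A minimal sketch (not part of the patch) of how a userToOMX_LUT table like the ones above is typically searched, assuming its second field is the integer OMX value the brace initializers suggest; the helper name is illustrative.

#include <string.h>

struct userToOMX_LUT {
    const char *userDefinition;
    int         omxDefinition;
};

static int lookupOmxValue(const userToOMX_LUT *lut, size_t entries,
                          const char *userValue, int fallback) {
    for (size_t i = 0; i < entries; i++) {
        if (strcmp(lut[i].userDefinition, userValue) == 0) {
            return lut[i].omxDefinition;
        }
    }
    return fallback;                 // value to use when the string is unknown
}
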
diff --git a/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
index 07baf16..19aea6f 100644
--- a/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
+++ b/camera/inc/OMXCameraAdapter/OMXCameraAdapter.h
@@ -48,7 +48,8 @@ extern "C"
}
-namespace android {
+namespace Ti {
+namespace Camera {
#define Q16_OFFSET 16
@@ -396,8 +397,8 @@ public:
virtual status_t initialize(CameraProperties::Properties*);
//APIs to configure Camera adapter and get the current parameter set
- virtual status_t setParameters(const CameraParameters& params);
- virtual void getParameters(CameraParameters& params);
+ virtual status_t setParameters(const android::CameraParameters& params);
+ virtual void getParameters(android::CameraParameters& params);
// API
status_t UseBuffersPreview(CameraBuffer *bufArr, int num);
@@ -485,7 +486,7 @@ private:
OMX_IN OMX_EVENTTYPE eEvent,
OMX_IN OMX_U32 nData1,
OMX_IN OMX_U32 nData2,
- OMX_IN Semaphore &semaphore);
+ OMX_IN Utils::Semaphore &semaphore);
status_t setPictureRotation(unsigned int degree);
status_t setSensorOrientation(unsigned int degree);
@@ -499,7 +500,7 @@ private:
status_t destroyTunnel();
//EXIF
- status_t setParametersEXIF(const CameraParameters &params,
+ status_t setParametersEXIF(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
status_t convertGPSCoord(double coord, int &deg, int &min, int &sec, int &secDivisor);
status_t setupEXIF();
@@ -516,12 +517,12 @@ private:
//Focus distances
- status_t setParametersFocus(const CameraParameters &params,
+ status_t setParametersFocus(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
status_t addFocusDistances(OMX_U32 &near,
OMX_U32 &optimal,
OMX_U32 &far,
- CameraParameters& params);
+ android::CameraParameters& params);
status_t encodeFocusDistance(OMX_U32 dist, char *buffer, size_t length);
status_t getFocusDistances(OMX_U32 &near,OMX_U32 &optimal, OMX_U32 &far);
@@ -530,15 +531,15 @@ private:
status_t enableVideoStabilization(bool enable);
//Digital zoom
- status_t setParametersZoom(const CameraParameters &params,
+ status_t setParametersZoom(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
status_t doZoom(int index);
status_t advanceZoom();
//3A related parameters
- status_t setParameters3A(const CameraParameters &params,
+ status_t setParameters3A(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
- void declareParameter3ABool(const CameraParameters &params, const char *key,
+ void declareParameter3ABool(const android::CameraParameters &params, const char *key,
OMX_BOOL &current_setting, E3ASettingsFlags pending,
const char *msg);
@@ -601,7 +602,7 @@ private:
//API to set FrameRate using VFR interface
status_t setVFramerate(OMX_U32 minFrameRate,OMX_U32 maxFrameRate);
- status_t setParametersAlgo(const CameraParameters &params,
+ status_t setParametersAlgo(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
//Noise filtering
@@ -622,13 +623,13 @@ private:
status_t setTouchFocus();
//Face detection
- status_t setParametersFD(const CameraParameters &params,
+ status_t setParametersFD(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
- status_t updateFocusDistances(CameraParameters &params);
+ status_t updateFocusDistances(android::CameraParameters &params);
status_t setFaceDetectionOrientation(OMX_U32 orientation);
status_t setFaceDetection(bool enable, OMX_U32 orientation);
status_t createPreviewMetadata(OMX_BUFFERHEADERTYPE* pBuffHeader,
- sp<CameraMetadataResult> &result,
+ android::sp<CameraMetadataResult> &result,
size_t previewWidth,
size_t previewHeight);
status_t encodeFaceCoordinates(const OMX_FACEDETECTIONTYPE *faceData,
@@ -652,7 +653,7 @@ private:
static status_t encodeISOCap(OMX_U32, const CapISO*, size_t, char*, size_t);
static size_t encodeZoomCap(OMX_S32, const CapZoom*, size_t, char*, size_t);
static void encodeFrameRates(int minFrameRate, int maxFrameRate, const OMX_TI_CAPTYPE & caps,
- const CapFramerate * fixedFrameRates, int frameRateCount, Vector<FpsRange> & fpsRanges);
+ const CapFramerate * fixedFrameRates, int frameRateCount, android::Vector<FpsRange> & fpsRanges);
static status_t encodeImageCodingFormatCap(OMX_IMAGE_CODINGTYPE,
const CapCodingFormat *,
size_t,
@@ -706,7 +707,7 @@ private:
const OMX_TI_CAPTYPE &caps);
static status_t insertRaw(CameraProperties::Properties*, OMX_TI_CAPTYPE&);
- status_t setParametersCapture(const CameraParameters &params,
+ status_t setParametersCapture(const android::CameraParameters &params,
BaseCameraAdapter::AdapterState state);
//Exposure Bracketing
@@ -754,14 +755,14 @@ private:
status_t apply3Asettings( Gen3A_settings& Gen3A );
// AutoConvergence
- status_t setAutoConvergence(const char *valstr, const char *pValManualstr, const CameraParameters &params);
+ status_t setAutoConvergence(const char *valstr, const char *pValManualstr, const android::CameraParameters &params);
status_t setExtraData(bool enable, OMX_U32, OMX_EXT_EXTRADATATYPE);
OMX_OTHER_EXTRADATATYPE *getExtradata(const OMX_PTR ptrPrivate, OMX_EXTRADATATYPE type) const;
// Meta data
#ifdef OMAP_ENHANCEMENT_CPCAM
- status_t setMetaData(CameraMetadata &meta_data, const OMX_PTR plat_pvt) const;
+ status_t setMetaData(android::CameraMetadata &meta_data, const OMX_PTR plat_pvt) const;
#endif
void encodePreviewMetadata(camera_frame_metadata_t *meta, const OMX_PTR plat_pvt);
@@ -786,17 +787,17 @@ private:
status_t deinitInternalBuffers (OMX_U32);
// Reprocess Methods -- implementation in OMXReprocess.cpp
- status_t setParametersReprocess(const CameraParameters &params, CameraBuffer* bufs,
+ status_t setParametersReprocess(const android::CameraParameters &params, CameraBuffer* bufs,
BaseCameraAdapter::AdapterState state);
status_t startReprocess();
status_t disableReprocess();
status_t stopReprocess();
status_t UseBuffersReprocess(CameraBuffer *bufArr, int num);
- class CommandHandler : public Thread {
+ class CommandHandler : public android::Thread {
public:
CommandHandler(OMXCameraAdapter* ca)
- : Thread(false), mCameraAdapter(ca) { }
+ : android::Thread(false), mCameraAdapter(ca) { }
virtual bool threadLoop() {
bool ret;
@@ -804,14 +805,14 @@ private:
return ret;
}
- status_t put(TIUTILS::Message* msg){
- Mutex::Autolock lock(mLock);
+ status_t put(Utils::Message* msg){
+ android::AutoMutex lock(mLock);
return mCommandMsgQ.put(msg);
}
void clearCommandQ()
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.clear();
}
@@ -825,15 +826,15 @@ private:
private:
bool Handler();
- TIUTILS::MessageQueue mCommandMsgQ;
+ Utils::MessageQueue mCommandMsgQ;
OMXCameraAdapter* mCameraAdapter;
- Mutex mLock;
+ android::Mutex mLock;
};
- sp<CommandHandler> mCommandHandler;
+ android::sp<CommandHandler> mCommandHandler;
public:
- class OMXCallbackHandler : public Thread {
+ class OMXCallbackHandler : public android::Thread {
public:
OMXCallbackHandler(OMXCameraAdapter* ca)
: Thread(false), mCameraAdapter(ca)
@@ -847,15 +848,15 @@ public:
return ret;
}
- status_t put(TIUTILS::Message* msg){
- Mutex::Autolock lock(mLock);
+ status_t put(Utils::Message* msg){
+ android::AutoMutex lock(mLock);
mIsProcessed = false;
return mCommandMsgQ.put(msg);
}
void clearCommandQ()
{
- Mutex::Autolock lock(mLock);
+ android::AutoMutex lock(mLock);
mCommandMsgQ.clear();
}
@@ -869,14 +870,14 @@ public:
private:
bool Handler();
- TIUTILS::MessageQueue mCommandMsgQ;
+ Utils::MessageQueue mCommandMsgQ;
OMXCameraAdapter* mCameraAdapter;
- Mutex mLock;
- Condition mCondition;
+ android::Mutex mLock;
+ android::Condition mCondition;
bool mIsProcessed;
};
- sp<OMXCallbackHandler> mOMXCallbackHandler;
+ android::sp<OMXCallbackHandler> mOMXCallbackHandler;
private:
@@ -987,23 +988,23 @@ private:
char mFocusDistBuffer[FOCUS_DIST_BUFFER_SIZE];
// Current Focus areas
- Vector< sp<CameraArea> > mFocusAreas;
- mutable Mutex mFocusAreasLock;
+ android::Vector<android::sp<CameraArea> > mFocusAreas;
+ mutable android::Mutex mFocusAreasLock;
// Current Touch convergence areas
- Vector< sp<CameraArea> > mTouchAreas;
- mutable Mutex mTouchAreasLock;
+ android::Vector<android::sp<CameraArea> > mTouchAreas;
+ mutable android::Mutex mTouchAreasLock;
// Current Metering areas
- Vector< sp<CameraArea> > mMeteringAreas;
- mutable Mutex mMeteringAreasLock;
+ android::Vector<android::sp<CameraArea> > mMeteringAreas;
+ mutable android::Mutex mMeteringAreasLock;
OperatingMode mCapabilitiesOpMode;
CaptureMode mCapMode;
// TODO(XXX): Do we really need this lock? Let's
// try to merge temporal bracketing and burst
// capture later
- mutable Mutex mBurstLock;
+ mutable android::Mutex mBurstLock;
size_t mBurstFrames;
size_t mBurstFramesAccum;
size_t mBurstFramesQueued;
@@ -1023,13 +1024,13 @@ private:
int mZoomBracketingValues[ZOOM_BRACKET_RANGE];
size_t mZoomBracketingValidEntries;
- mutable Mutex mFaceDetectionLock;
+ mutable android::Mutex mFaceDetectionLock;
//Face detection status
bool mFaceDetectionRunning;
bool mFaceDetectionPaused;
bool mFDSwitchAlgoPriority;
- camera_face_t faceDetectionLastOutput [MAX_NUM_FACES_SUPPORTED];
+ camera_face_t faceDetectionLastOutput[MAX_NUM_FACES_SUPPORTED];
int faceDetectionNumFacesLastOutput;
//Geo-tagging
@@ -1060,7 +1061,7 @@ private:
unsigned int mZoomParameterIdx;
//current zoom
- Mutex mZoomLock;
+ android::Mutex mZoomLock;
unsigned int mCurrentZoomIdx, mTargetZoomIdx, mPreviousZoomIndx;
bool mZoomUpdating, mZoomUpdate;
int mZoomInc;
@@ -1071,7 +1072,7 @@ private:
OMX_VERSIONTYPE mLocalVersionParam;
unsigned int mPending3Asettings;
- Mutex m3ASettingsUpdateLock;
+ android::Mutex m3ASettingsUpdateLock;
Gen3A_settings mParameters3A;
const char *mPictureFormatFromClient;
@@ -1081,7 +1082,7 @@ private:
OMX_TI_CONFIG_3A_FACE_PRIORITY mFacePriority;
OMX_TI_CONFIG_3A_REGION_PRIORITY mRegionPriority;
- CameraParameters mParams;
+ android::CameraParameters mParams;
CameraProperties::Properties* mCapabilities;
unsigned int mPictureRotation;
bool mWaitingForSnapshot;
@@ -1095,7 +1096,7 @@ private:
//Temporal bracketing management data
bool mBracketingSet;
- mutable Mutex mBracketingLock;
+ mutable android::Mutex mBracketingLock;
bool *mBracketingBuffersQueued;
int mBracketingBuffersQueuedCount;
int mLastBracetingBufferIdx;
@@ -1103,31 +1104,31 @@ private:
bool mZoomBracketingEnabled;
size_t mBracketingRange;
int mCurrentZoomBracketing;
- CameraParameters mParameters;
+ android::CameraParameters mParameters;
bool mOmxInitialized;
OMXCameraAdapterComponentContext mCameraAdapterParameters;
bool mFirstTimeInit;
///Semaphores used internally
- Semaphore mInitSem;
- Semaphore mFlushSem;
- Semaphore mUsePreviewDataSem;
- Semaphore mUsePreviewSem;
- Semaphore mUseCaptureSem;
- Semaphore mStartPreviewSem;
- Semaphore mStopPreviewSem;
- Semaphore mStartCaptureSem;
- Semaphore mStopCaptureSem;
- Semaphore mSwitchToLoadedSem;
- Semaphore mSwitchToExecSem;
- Semaphore mStopReprocSem;
- Semaphore mUseReprocessSem;
-
- mutable Mutex mStateSwitchLock;
- mutable Mutex mIdleStateSwitchLock;
-
- Vector<struct TIUTILS::Message *> mEventSignalQ;
- Mutex mEventLock;
+ Utils::Semaphore mInitSem;
+ Utils::Semaphore mFlushSem;
+ Utils::Semaphore mUsePreviewDataSem;
+ Utils::Semaphore mUsePreviewSem;
+ Utils::Semaphore mUseCaptureSem;
+ Utils::Semaphore mStartPreviewSem;
+ Utils::Semaphore mStopPreviewSem;
+ Utils::Semaphore mStartCaptureSem;
+ Utils::Semaphore mStopCaptureSem;
+ Utils::Semaphore mSwitchToLoadedSem;
+ Utils::Semaphore mSwitchToExecSem;
+ Utils::Semaphore mStopReprocSem;
+ Utils::Semaphore mUseReprocessSem;
+
+ mutable android::Mutex mStateSwitchLock;
+ mutable android::Mutex mIdleStateSwitchLock;
+
+ android::Vector<Utils::Message *> mEventSignalQ;
+ android::Mutex mEventLock;
OMX_STATETYPE mComponentState;
@@ -1149,15 +1150,15 @@ private:
int mLastFrameCount;
unsigned int mIter;
nsecs_t mLastFPSTime;
- Mutex mFrameCountMutex;
- Condition mFirstFrameCondition;
+ android::Mutex mFrameCountMutex;
+ android::Condition mFirstFrameCondition;
static const nsecs_t CANCEL_AF_TIMEOUT;
- Mutex mCancelAFMutex;
- Condition mCancelAFCond;
+ android::Mutex mCancelAFMutex;
+ android::Condition mCancelAFCond;
- Mutex mDoAFMutex;
- Condition mDoAFCond;
+ android::Mutex mDoAFMutex;
+ android::Condition mDoAFCond;
size_t mSensorIndex;
CodingMode mCodingMode;
@@ -1166,7 +1167,7 @@ private:
OMX_TICKS mTimeSourceDelta;
bool onlyOnce;
- Semaphore mCaptureSem;
+ Utils::Semaphore mCaptureSem;
bool mCaptureSignalled;
OMX_BOOL mUserSetExpLock;
@@ -1180,13 +1181,16 @@ private:
bool mSetFormatDone;
OMX_TI_DCCDATATYPE mDccData;
- Mutex mDccDataLock;
+ android::Mutex mDccDataLock;
int mMaxZoomSupported;
- Mutex mImageCaptureLock;
+ android::Mutex mImageCaptureLock;
bool mTunnelDestroyed;
};
-}; //// namespace
+
+} // namespace Camera
+} // namespace Ti
+
#endif //OMX_CAMERA_ADAPTER_H
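
A minimal sketch (not part of the patch) of how the Utils::Semaphore members above are typically used to block a caller until an OMX event handler signals completion; the function name and 100 ms timeout are illustrative.

#include "Semaphore.h"

static Ti::status_t issueCommandAndWait() {
    Ti::Utils::Semaphore sem;
    sem.Create(0);                        // starts empty, so WaitTimeout() blocks
    // ... send the OMX command; the event callback signals the semaphore ...
    return sem.WaitTimeout(100 * 1000);   // timeout is in microseconds
}
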
diff --git a/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h b/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h
index 2b66988..d57843e 100644
--- a/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h
+++ b/camera/inc/OMXCameraAdapter/OMXSceneModeTables.h
@@ -32,6 +32,9 @@
#define ARRAY_SIZE(array) (sizeof((array)) / sizeof((array)[0]))
#endif
+namespace Ti {
+namespace Camera {
+
struct SceneModesEntry {
OMX_SCENEMODETYPE scene;
OMX_IMAGE_FLASHCONTROLTYPE flash;
@@ -544,4 +547,7 @@ static const CameraToSensorModesLUTEntry CameraToSensorModesLUT [] = {
{ "OV5650", OV5650_SceneModesLUT, ARRAY_SIZE(OV5650_SceneModesLUT)},
};
+} // namespace Camera
+} // namespace Ti
+
#endif
diff --git a/camera/inc/SensorListener.h b/camera/inc/SensorListener.h
index f36ea0d..44037b7 100644
--- a/camera/inc/SensorListener.h
+++ b/camera/inc/SensorListener.h
@@ -32,7 +32,8 @@
#include "Common.h"
-namespace android {
+namespace Ti {
+namespace Camera {
/**
* SensorListner class - Registers with sensor manager to get sensor events
@@ -40,11 +41,11 @@ namespace android {
typedef void (*orientation_callback_t) (uint32_t orientation, uint32_t tilt, void* cookie);
-class SensorLooperThread : public Thread {
+class SensorLooperThread : public android::Thread {
public:
- SensorLooperThread(Looper* looper)
+ SensorLooperThread(android::Looper* looper)
: Thread(false) {
- mLooper = sp<Looper>(looper);
+ mLooper = android::sp<android::Looper>(looper);
}
~SensorLooperThread() {
mLooper.clear();
@@ -60,11 +61,11 @@ class SensorLooperThread : public Thread {
mLooper->wake();
}
private:
- sp<Looper> mLooper;
+ android::sp<android::Looper> mLooper;
};
-class SensorListener : public RefBase
+class SensorListener : public android::RefBase
{
/* public - types */
public:
@@ -87,17 +88,18 @@ public:
void handleOrientation(uint32_t orientation, uint32_t tilt);
/* public - member variables */
public:
- sp<SensorEventQueue> mSensorEventQueue;
+ android::sp<android::SensorEventQueue> mSensorEventQueue;
/* private - member variables */
private:
int sensorsEnabled;
orientation_callback_t mOrientationCb;
void *mCbCookie;
- sp<Looper> mLooper;
- sp<SensorLooperThread> mSensorLooperThread;
- Mutex mLock;
+ android::sp<android::Looper> mLooper;
+ android::sp<SensorLooperThread> mSensorLooperThread;
+ android::Mutex mLock;
};
-}
+} // namespace Camera
+} // namespace Ti
#endif
diff --git a/camera/inc/TICameraParameters.h b/camera/inc/TICameraParameters.h
index 71b24c0..84fa9fa 100644
--- a/camera/inc/TICameraParameters.h
+++ b/camera/inc/TICameraParameters.h
@@ -20,7 +20,8 @@
#include <utils/KeyedVector.h>
#include <utils/String8.h>
-namespace android {
+namespace Ti {
+namespace Camera {
///TI Specific Camera Parameters
class TICameraParameters
@@ -252,6 +253,7 @@ static const char KEY_ALGO_GIC[];
};
-};
+} // namespace Camera
+} // namespace Ti
#endif
diff --git a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
index 3c26bfd..cf277ac 100644
--- a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
+++ b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -25,7 +25,8 @@
#include "BaseCameraAdapter.h"
#include "DebugUtils.h"
-namespace android {
+namespace Ti {
+namespace Camera {
#define DEFAULT_PIXEL_FORMAT V4L2_PIX_FMT_YUYV
@@ -106,8 +107,8 @@ public:
virtual status_t initialize(CameraProperties::Properties*);
//APIs to configure Camera adapter and get the current parameter set
- virtual status_t setParameters(const CameraParameters& params);
- virtual void getParameters(CameraParameters& params);
+ virtual status_t setParameters(const android::CameraParameters& params);
+ virtual void getParameters(android::CameraParameters& params);
// API
virtual status_t UseBuffersPreview(CameraBuffer *bufArr, int num);
@@ -134,13 +135,13 @@ protected:
private:
- class PreviewThread : public Thread {
+ class PreviewThread : public android::Thread {
V4LCameraAdapter* mAdapter;
public:
PreviewThread(V4LCameraAdapter* hw) :
Thread(false), mAdapter(hw) { }
virtual void onFirstRef() {
- run("CameraPreviewThread", PRIORITY_URGENT_DISPLAY);
+ run("CameraPreviewThread", android::PRIORITY_URGENT_DISPLAY);
}
virtual bool threadLoop() {
mAdapter->previewThread();
@@ -198,17 +199,17 @@ private:
int mPreviewBufferCountQueueable;
int mCaptureBufferCount;
int mCaptureBufferCountQueueable;
- KeyedVector<CameraBuffer *, int> mPreviewBufs;
- KeyedVector<CameraBuffer *, int> mCaptureBufs;
- mutable Mutex mPreviewBufsLock;
- mutable Mutex mCaptureBufsLock;
- mutable Mutex mStopPreviewLock;
+ android::KeyedVector<CameraBuffer *, int> mPreviewBufs;
+ android::KeyedVector<CameraBuffer *, int> mCaptureBufs;
+ mutable android::Mutex mPreviewBufsLock;
+ mutable android::Mutex mCaptureBufsLock;
+ mutable android::Mutex mStopPreviewLock;
- CameraParameters mParams;
+ android::CameraParameters mParams;
bool mPreviewing;
bool mCapturing;
- Mutex mLock;
+ android::Mutex mLock;
int mFrameCount;
int mLastFrameCount;
@@ -221,7 +222,7 @@ private:
int mSensorIndex;
// protected by mLock
- sp<PreviewThread> mPreviewThread;
+ android::sp<PreviewThread> mPreviewThread;
struct VideoInfo *mVideoInfo;
int mCameraHandle;
@@ -230,6 +231,8 @@ private:
int nDequeued;
};
-}; //// namespace
-#endif //V4L_CAMERA_ADAPTER_H
+} // namespace Camera
+} // namespace Ti
+
+#endif //V4L_CAMERA_ADAPTER_H
diff --git a/libtiutils/DebugUtils.cpp b/libtiutils/DebugUtils.cpp
index 0165118..60ad0c8 100644
--- a/libtiutils/DebugUtils.cpp
+++ b/libtiutils/DebugUtils.cpp
@@ -53,7 +53,7 @@ Debug::Debug()
void Debug::grow()
{
- android::Mutex::Autolock locker(mMutex);
+ android::AutoMutex locker(mMutex);
(void)locker;
const int size = kDebugThreadInfoGrowSize;
diff --git a/libtiutils/ErrorUtils.cpp b/libtiutils/ErrorUtils.cpp
index df0e51c..e30fcfd 100644
--- a/libtiutils/ErrorUtils.cpp
+++ b/libtiutils/ErrorUtils.cpp
@@ -17,7 +17,8 @@
#include "ErrorUtils.h"
-namespace android {
+namespace Ti {
+namespace Utils {
/**
@brief Method to convert from POSIX to Android errors
@@ -135,7 +136,5 @@ status_t ErrorUtils::omxToAndroidError(OMX_ERRORTYPE error)
}
-};
-
-
-
+} // namespace Utils
+} // namespace Ti
diff --git a/libtiutils/ErrorUtils.h b/libtiutils/ErrorUtils.h
index 204ec97..c6c23a2 100644
--- a/libtiutils/ErrorUtils.h
+++ b/libtiutils/ErrorUtils.h
@@ -23,6 +23,8 @@
///Header file where all the OMX error codes are defined
#include "OMX_Core.h"
+#include "Status.h"
+
extern "C"
{
@@ -30,7 +32,8 @@ extern "C"
#include "timm_osal_error.h"
};
-namespace android {
+namespace Ti {
+namespace Utils {
///Generic class with static methods to convert any standard error type to Android error type
class ErrorUtils
@@ -47,6 +50,7 @@ public:
};
-};
+} // namespace Utils
+} // namespace Ti
#endif /// ERROR_UTILS_H
diff --git a/libtiutils/MessageQueue.cpp b/libtiutils/MessageQueue.cpp
index 997add2..13b1d53 100644
--- a/libtiutils/MessageQueue.cpp
+++ b/libtiutils/MessageQueue.cpp
@@ -29,7 +29,8 @@
#include "MessageQueue.h"
-namespace TIUTILS {
+namespace Ti {
+namespace Utils {
/**
@brief Constructor for the message queue class
@@ -414,4 +415,5 @@ android::status_t MessageQueue::waitForMsg(MessageQueue *queue1, MessageQueue *q
return ret;
}
-};
+} // namespace Utils
+} // namespace Ti
diff --git a/libtiutils/MessageQueue.h b/libtiutils/MessageQueue.h
index 7d6581c..a0a3468 100644
--- a/libtiutils/MessageQueue.h
+++ b/libtiutils/MessageQueue.h
@@ -33,7 +33,8 @@
#define MSGQ_LOGEA DBGUTILS_LOGEA
#define MSGQ_LOGEB DBGUTILS_LOGEB
-namespace TIUTILS {
+namespace Ti {
+namespace Utils {
///Message type
struct Message
@@ -88,6 +89,19 @@ private:
bool mHasMsg;
};
-};
+} // namespace Utils
+} // namespace Ti
+
+
+
+
+// FIXME: Temporary workaround to avoid namespace fixes in VTC test code right now.
+namespace TIUTILS {
+ typedef Ti::Utils::Message Message;
+ typedef Ti::Utils::MessageQueue MessageQueue;
+}
+
+
+
#endif
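
The TIUTILS typedefs above are a stop-gap so code that still says TIUTILS::MessageQueue keeps compiling against the relocated Ti::Utils types. A minimal sketch (not part of the patch), with an illustrative command value:

#include "MessageQueue.h"

static void legacyPost(TIUTILS::MessageQueue &queue) {
    TIUTILS::Message msg;          // exactly the same type as Ti::Utils::Message
    msg.command = 1;               // illustrative command code
    queue.put(&msg);
}
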
diff --git a/libtiutils/Semaphore.cpp b/libtiutils/Semaphore.cpp
index 37f3a89..512eee3 100644
--- a/libtiutils/Semaphore.cpp
+++ b/libtiutils/Semaphore.cpp
@@ -21,7 +21,8 @@
#include <utils/Log.h>
#include <time.h>
-namespace android {
+namespace Ti {
+namespace Utils {
/**
@brief Constructor for the semaphore class
@@ -227,6 +228,5 @@ status_t Semaphore::WaitTimeout(int timeoutMicroSecs)
}
-};
-
-
+} // namespace Utils
+} // namespace Ti
diff --git a/libtiutils/Semaphore.h b/libtiutils/Semaphore.h
index 6990848..8d64f3f 100644
--- a/libtiutils/Semaphore.h
+++ b/libtiutils/Semaphore.h
@@ -24,7 +24,10 @@
#include <string.h>
#include <unistd.h>
-namespace android {
+#include "Status.h"
+
+namespace Ti {
+namespace Utils {
class Semaphore
{
@@ -56,4 +59,5 @@ private:
};
-};
+} // namespace Utils
+} // namespace Ti
diff --git a/libtiutils/Status.h b/libtiutils/Status.h
new file mode 100644
index 0000000..ded2cec
--- /dev/null
+++ b/libtiutils/Status.h
@@ -0,0 +1,67 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef TI_UTILS_STATUS_H
+#define TI_UTILS_STATUS_H
+
+#include <utils/Errors.h>
+
+#include "UtilsCommon.h"
+
+
+
+
+namespace Ti {
+
+
+
+
+typedef int status_t;
+
+#define TI_CAMERA_DEFINE_STATUS_CODE(x) x = android::x,
+enum {
+ TI_CAMERA_DEFINE_STATUS_CODE(OK)
+ TI_CAMERA_DEFINE_STATUS_CODE(NO_ERROR)
+ TI_CAMERA_DEFINE_STATUS_CODE(UNKNOWN_ERROR)
+ TI_CAMERA_DEFINE_STATUS_CODE(NO_MEMORY)
+ TI_CAMERA_DEFINE_STATUS_CODE(INVALID_OPERATION)
+ TI_CAMERA_DEFINE_STATUS_CODE(BAD_VALUE)
+ TI_CAMERA_DEFINE_STATUS_CODE(BAD_TYPE)
+ TI_CAMERA_DEFINE_STATUS_CODE(NAME_NOT_FOUND)
+ TI_CAMERA_DEFINE_STATUS_CODE(PERMISSION_DENIED)
+ TI_CAMERA_DEFINE_STATUS_CODE(NO_INIT)
+ TI_CAMERA_DEFINE_STATUS_CODE(ALREADY_EXISTS)
+ TI_CAMERA_DEFINE_STATUS_CODE(DEAD_OBJECT)
+ TI_CAMERA_DEFINE_STATUS_CODE(FAILED_TRANSACTION)
+ TI_CAMERA_DEFINE_STATUS_CODE(JPARKS_BROKE_IT)
+ TI_CAMERA_DEFINE_STATUS_CODE(BAD_INDEX)
+ TI_CAMERA_DEFINE_STATUS_CODE(NOT_ENOUGH_DATA)
+ TI_CAMERA_DEFINE_STATUS_CODE(WOULD_BLOCK)
+ TI_CAMERA_DEFINE_STATUS_CODE(TIMED_OUT)
+ TI_CAMERA_DEFINE_STATUS_CODE(UNKNOWN_TRANSACTION)
+ TI_CAMERA_DEFINE_STATUS_CODE(FDS_NOT_ALLOWED)
+};
+#undef TI_CAMERA_DEFINE_STATUS_CODE
+
+
+
+
+} // namespace Ti
+
+
+
+
+#endif // TI_UTILS_STATUS_H
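
Each TI_CAMERA_DEFINE_STATUS_CODE(X) line above expands to `X = android::X,`, so Ti::status_t values stay numerically identical to the framework's android::status_t codes and can cross the boundary without conversion. A minimal sketch (not part of the patch), with an illustrative function:

#include <cstddef>
#include "Status.h"

namespace Ti {

static status_t checkAllocation(const void *ptr) {
    return (ptr != NULL) ? NO_ERROR : NO_MEMORY;   // Ti::NO_MEMORY == android::NO_MEMORY
}

} // namespace Ti
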
diff --git a/libtiutils/UtilsCommon.h b/libtiutils/UtilsCommon.h
index 571bc81..8aaeee7 100644
--- a/libtiutils/UtilsCommon.h
+++ b/libtiutils/UtilsCommon.h
@@ -22,7 +22,7 @@
-namespace android {
+namespace Ti {
@@ -91,7 +91,7 @@ inline T abs(const T & x) {
-} // namespace android
+} // namespace Ti