path: root/camera
author    Andriy Chepurnyy <x0155536@ti.com>    2012-08-29 14:37:46 +0300
committer Daniel Levin <dendy@ti.com>    2012-11-26 20:17:15 +0200
commit    45588315e9b4abc53b7fbf4f5032eed54730e40f (patch)
tree      300ee55501640309acc653847e749495ae39bec4 /camera
parent    143c25c0f2ec3f84be5fe1c9aa85e0006a19707f (diff)
download  hardware_ti_omap4-45588315e9b4abc53b7fbf4f5032eed54730e40f.tar.gz
          hardware_ti_omap4-45588315e9b4abc53b7fbf4f5032eed54730e40f.tar.bz2
          hardware_ti_omap4-45588315e9b4abc53b7fbf4f5032eed54730e40f.zip
CameraHal: Add support for HW MJPEG decoder in CameraHAL
This patch adds Ducati decoder support at the CameraHAL level.
- Added FrameDecoder - used as the base class for HW/SW decoders.
- SwFrameDecoder - a class capable of SW MJPEG decoding.
- OmxFrameDecoder - a class capable of HW MJPEG/H264 decoding (using the Ducati decoder).
- FrameDecoder is now used as the generic decoder interface in the V4L camera adapter.
- Added the ability to change the video stream type for the V4L camera.
Change-Id: Ia632cb0e202752b05a6994acc90c7f10ee851a34 Signed-off-by: Andriy Chepurnyy <x0155536@ti.com>
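For orientation, the new decoder interface is driven from V4LCameraAdapter roughly as sketched below. This is a minimal, hypothetical usage sketch based only on the classes added by this patch (DecoderFactory, FrameDecoder, DecoderParameters, MediaBuffer); the helper function name, the buffer vectors and the single-shot dequeue calls are illustrative assumptions. In the real adapter the input buffers come from the V4L2 mmap pool, the output buffers are the preview (gralloc) buffers, and dequeueing is retried as frames complete asynchronously.

// Hypothetical usage sketch (not part of the patch): shows the intended call
// order on the FrameDecoder interface introduced here.
#include "DecoderFactory.h"
#include "FrameDecoder.h"

using namespace Ti::Camera;

status_t decodeOneMjpegFrame(android::Vector< android::sp<MediaBuffer> > &inBuffers,
                             android::Vector< android::sp<MediaBuffer> > &outBuffers,
                             int width, int height)
{
    // forceSwDecoder = false selects the OMX (Ducati) MJPEG path;
    // true would select the SW JPEG decoder instead.
    FrameDecoder *decoder = DecoderFactory::createDecoderByType(DecoderType_MJPEG, false);
    if (decoder == NULL) {
        return UNKNOWN_ERROR;
    }

    // Hand the pre-allocated buffer pools to the decoder and configure it.
    decoder->registerInputBuffers(&inBuffers);
    decoder->registerOutputBuffers(&outBuffers);

    DecoderParameters params;
    params.width = width;
    params.height = height;
    params.inputBufferCount = inBuffers.size();
    params.outputBufferCount = outBuffers.size();
    decoder->configure(params);

    // Output buffers may be queued before start(); start() brings the OMX
    // component up to Idle, and the switch to Executing happens when the
    // first input buffer is processed.
    for (size_t i = 0; i < outBuffers.size(); i++) {
        decoder->queueOutputBuffer(i);
    }
    decoder->start();

    // Buffer 0 is assumed to already hold a compressed frame with
    // filledLen and timestamp set by the capture path.
    decoder->queueInputBuffer(0);

    // In the adapter these are polled as the decoder completes work:
    // a consumed input buffer goes back to V4L2, a filled output buffer
    // goes to the display.
    int inIndex = -1, outIndex = -1;
    decoder->dequeueInputBuffer(inIndex);
    decoder->dequeueOutputBuffer(outIndex);

    decoder->stop();
    decoder->release();
    delete decoder;
    return NO_ERROR;
}

Note that queueInputBuffer() internally calls doProcessInputBuffer(), so decoding is driven from the capture path that queues the compressed frame rather than from a dedicated decoder thread.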
Diffstat (limited to 'camera')
-rwxr-xr-xcamera/Android.mk6
-rw-r--r--camera/CameraHal.cpp5
-rw-r--r--camera/DecoderFactory.cpp59
-rw-r--r--camera/FrameDecoder.cpp204
-rw-r--r--camera/OmxFrameDecoder.cpp1056
-rw-r--r--camera/SwFrameDecoder.cpp85
-rwxr-xr-xcamera/V4LCameraAdapter/V4LCameraAdapter.cpp474
-rw-r--r--camera/inc/DecoderFactory.h35
-rw-r--r--camera/inc/FrameDecoder.h173
-rw-r--r--camera/inc/OmxFrameDecoder.h199
-rw-r--r--camera/inc/SwFrameDecoder.h47
-rwxr-xr-xcamera/inc/V4LCameraAdapter/V4LCameraAdapter.h33
12 files changed, 2231 insertions, 145 deletions
diff --git a/camera/Android.mk b/camera/Android.mk
index 099306b..ffed64a 100755
--- a/camera/Android.mk
+++ b/camera/Android.mk
@@ -83,7 +83,11 @@ TI_CAMERAHAL_COMMON_SRC := \
NV12_resize.cpp \
CameraParameters.cpp \
TICameraParameters.cpp \
- CameraHalCommon.cpp
+ CameraHalCommon.cpp \
+ FrameDecoder.cpp \
+ SwFrameDecoder.cpp \
+ OmxFrameDecoder.cpp \
+ DecoderFactory.cpp
TI_CAMERAHAL_OMX_SRC := \
OMXCameraAdapter/OMX3A.cpp \
diff --git a/camera/CameraHal.cpp b/camera/CameraHal.cpp
index 369c398..a605026 100644
--- a/camera/CameraHal.cpp
+++ b/camera/CameraHal.cpp
@@ -35,7 +35,7 @@ namespace Ti {
namespace Camera {
extern "C" CameraAdapter* OMXCameraAdapter_Factory(size_t);
-extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t);
+extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t, CameraHal*);
/*****************************************************************************/
@@ -2052,7 +2052,6 @@ status_t CameraHal::setPreviewWindow(struct preview_stream_ops *window)
CAMHAL_LOGE("Can't apply locking policy on AppCallbackNotifier");
CAMHAL_ASSERT(0);
}
-
mDisplayAdapter = displayAdapter;
#ifdef OMAP_ENHANCEMENT
mDisplayAdapter->setExtendedOps(mExtendedPreviewStreamOps);
@@ -4095,7 +4094,7 @@ status_t CameraHal::initialize(CameraProperties::Properties* properties)
if (strcmp(sensor_name, V4L_CAMERA_NAME_USB) == 0) {
#ifdef V4L_CAMERA_ADAPTER
- mCameraAdapter = V4LCameraAdapter_Factory(sensor_index);
+ mCameraAdapter = V4LCameraAdapter_Factory(sensor_index, this);
#endif
}
else {
diff --git a/camera/DecoderFactory.cpp b/camera/DecoderFactory.cpp
new file mode 100644
index 0000000..846fda4
--- /dev/null
+++ b/camera/DecoderFactory.cpp
@@ -0,0 +1,59 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "FrameDecoder.h"
+#include "SwFrameDecoder.h"
+#include "OmxFrameDecoder.h"
+#include "CameraHal.h"
+#include "DecoderFactory.h"
+
+namespace Ti {
+namespace Camera {
+
+
+FrameDecoder* DecoderFactory::createDecoderByType(DecoderType type, bool forceSwDecoder) {
+ FrameDecoder* decoder = NULL;
+ switch (type) {
+ case DecoderType_MJPEG: {
+
+ if (!forceSwDecoder) {
+ decoder = new OmxFrameDecoder(DecoderType_MJPEG);
+ CAMHAL_LOGD("Using HW Decoder for MJPEG");
+ } else {
+ decoder = new SwFrameDecoder();
+ CAMHAL_LOGD("Using SW Decoder for MJPEG");
+ }
+
+ //TODO: add logic to check whether the HW decoder is actually available,
+ // and fall back to the SW decoder if it is not.
+ break;
+ }
+ case DecoderType_H264: {
+ decoder = new OmxFrameDecoder(DecoderType_H264);
+ CAMHAL_LOGD("Using HW Decoder for H264");
+ break;
+ }
+ default: {
+ CAMHAL_LOGE("Unrecognized decoder type %d", type);
+ }
+ }
+
+ return decoder;
+}
+
+} // namespace Camera
+} // namespace Ti
+
diff --git a/camera/FrameDecoder.cpp b/camera/FrameDecoder.cpp
new file mode 100644
index 0000000..80b4946
--- /dev/null
+++ b/camera/FrameDecoder.cpp
@@ -0,0 +1,204 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Common.h"
+#include "FrameDecoder.h"
+
+
+namespace Ti {
+namespace Camera {
+
+FrameDecoder::FrameDecoder()
+: mCameraHal(NULL), mState(DecoderState_Uninitialized) {
+}
+
+FrameDecoder::~FrameDecoder() {
+}
+
+status_t FrameDecoder::start() {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ status_t ret;
+ if (mState == DecoderState_Running) {
+ return NO_INIT;
+ }
+ ret = doStart();
+ if (ret == NO_ERROR) {
+ mState = DecoderState_Running;
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+void FrameDecoder::stop() {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ if (mState >= DecoderState_Requested_Stop) {
+ return;
+ }
+ mState = DecoderState_Requested_Stop;
+ doStop();
+ mState = DecoderState_Stoppped;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void FrameDecoder::release() {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ if (mState <= DecoderState_Requested_Stop) {
+ return;
+ }
+ doRelease();
+ mState = DecoderState_Uninitialized;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void FrameDecoder::flush() {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ if (mState <= DecoderState_Requested_Stop) {
+ return;
+ }
+ doFlush();
+ mInQueue.clear();
+ mOutQueue.clear();
+
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void FrameDecoder::configure(const DecoderParameters& params) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+ if (mState == DecoderState_Running) {
+ return;
+ }
+ mParams = params;
+ mInQueue.reserve(mParams.inputBufferCount);
+ mOutQueue.reserve(mParams.outputBufferCount);
+ doConfigure(params);
+ mState = DecoderState_Initialized;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t FrameDecoder::dequeueInputBuffer(int &id) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ if (mState != DecoderState_Running) {
+ CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
+ return INVALID_OPERATION;
+ }
+
+ for (size_t i = 0; i < mInQueue.size(); i++) {
+ int index = mInQueue[i];
+ android::sp<MediaBuffer>& in = mInBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(in->getLock());
+ if (in->getStatus() == BufferStatus_InDecoded) {
+ id = index;
+ in->setStatus(BufferStatus_Unknown);
+ mInQueue.removeAt(i);
+ return NO_ERROR;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return INVALID_OPERATION;
+}
+
+status_t FrameDecoder::dequeueOutputBuffer(int &id) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ if (mState != DecoderState_Running) {
+ CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
+ return INVALID_OPERATION;
+ }
+
+ for (size_t i = 0; i < mOutQueue.size(); i++) {
+ int index = mOutQueue[i];
+ android::sp<MediaBuffer>& out = mOutBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(out->getLock());
+ if (out->getStatus() == BufferStatus_OutFilled) {
+ id = index;
+ out->setStatus(BufferStatus_Unknown);
+ mOutQueue.removeAt(i);
+ return NO_ERROR;
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return INVALID_OPERATION;
+}
+
+status_t FrameDecoder::queueOutputBuffer(int index) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ // Output buffers may be queued before the decoder is started, so any state up to Running is accepted here
+ if (mState > DecoderState_Running) {
+ CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
+ return INVALID_OPERATION;
+ }
+
+ android::sp<MediaBuffer>& out = mOutBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(out->getLock());
+ out->setStatus(BufferStatus_OutQueued);
+ mOutQueue.push_back(index);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+status_t FrameDecoder::queueInputBuffer(int id) {
+ LOG_FUNCTION_NAME;
+
+ android::AutoMutex lock(mLock);
+
+ if (mState != DecoderState_Running) {
+ CAMHAL_LOGE("Try to use Decoder not in RUNNING state");
+ return INVALID_OPERATION;
+ }
+
+ {
+ android::sp<MediaBuffer>& in = mInBuffers->editItemAt(id);
+ android::AutoMutex bufferLock(in->getLock());
+ in->setStatus(BufferStatus_InQueued);
+ mInQueue.push_back(id);
+ }
+
+ // Since we got queued buffer - we can process it
+ doProcessInputBuffer();
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/OmxFrameDecoder.cpp b/camera/OmxFrameDecoder.cpp
new file mode 100644
index 0000000..bd429c7
--- /dev/null
+++ b/camera/OmxFrameDecoder.cpp
@@ -0,0 +1,1056 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "ErrorUtils.h"
+#include "OmxFrameDecoder.h"
+#include "OMX_TI_IVCommon.h"
+#include "OMX_TI_Index.h"
+
+namespace Ti {
+namespace Camera {
+
+const static uint32_t kMaxColorFormatSupported = 1000;
+const static int kMaxStateSwitchTimeOut = 1 * 1000 * 1000 * 1000; // 1 sec
+
+static const char* gDecoderRole[2] = {"video_decoder.mjpeg", "video_decoder.avc"};
+static const OMX_VIDEO_CODINGTYPE gCompressionFormat[2] = {OMX_VIDEO_CodingMJPEG, OMX_VIDEO_CodingAVC};
+
+
+template<class T>
+static void InitOMXParams(T *params) {
+ params->nSize = sizeof(T);
+ params->nVersion.s.nVersionMajor = 1;
+ params->nVersion.s.nVersionMinor = 0;
+ params->nVersion.s.nRevision = 0;
+ params->nVersion.s.nStep = 0;
+}
+
+
+
+CallbackDispatcher::CallbackDispatcher()
+: mDone(false) {
+ mThread = new CallbackDispatcherThread(this);
+ mThread->run("OMXCallbackDisp", ANDROID_PRIORITY_FOREGROUND);
+}
+
+CallbackDispatcher::~CallbackDispatcher() {
+ {
+ android::Mutex::Autolock autoLock(mLock);
+
+ mDone = true;
+ mQueueChanged.signal();
+ }
+
+ status_t status = mThread->join();
+ if (status != WOULD_BLOCK) {
+ //CAMHAL_ASSERT(status, (status_t)NO_ERROR);
+ }
+}
+
+void CallbackDispatcher::post(const OmxMessage &msg) {
+ android::Mutex::Autolock autoLock(mLock);
+
+ mQueue.push_back(msg);
+ mQueueChanged.signal();
+}
+
+void CallbackDispatcher::dispatch(const OmxMessage &msg) {
+
+ switch(msg.type)
+ {
+ case OmxMessage::EVENT :
+ {
+ static_cast<OmxFrameDecoder*>(msg.u.eventData.appData)->eventHandler(msg.u.eventData.event, msg.u.eventData.data1, msg.u.eventData.data2, msg.u.eventData.pEventData);
+ break;
+ }
+
+ case OmxMessage::EMPTY_BUFFER_DONE:
+ {
+ static_cast<OmxFrameDecoder*>(msg.u.bufferData.appData)->emptyBufferDoneHandler(msg.u.bufferData.pBuffHead);
+ break;
+ }
+
+ case OmxMessage::FILL_BUFFER_DONE:
+ {
+ static_cast<OmxFrameDecoder*>(msg.u.bufferData.appData)->fillBufferDoneHandler(msg.u.bufferData.pBuffHead);
+ break;
+ }
+ };
+}
+
+bool CallbackDispatcher::loop() {
+ for (;;) {
+ OmxMessage msg;
+
+ {
+ android::Mutex::Autolock autoLock(mLock);
+ while (!mDone && mQueue.empty()) {
+ mQueueChanged.wait(mLock);
+ }
+
+ if (mDone) {
+ break;
+ }
+
+ msg = *mQueue.begin();
+ mQueue.erase(mQueue.begin());
+ }
+
+ dispatch(msg);
+ }
+
+ return false;
+}
+
+bool CallbackDispatcherThread::threadLoop() {
+ return mDispatcher->loop();
+}
+
+//Static
+OMX_ERRORTYPE OmxFrameDecoder::eventCallback(const OMX_HANDLETYPE component,
+ const OMX_PTR appData, const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData) {
+ OmxMessage msg;
+ msg.type = OmxMessage::EVENT;
+ msg.u.eventData.appData = appData;
+ msg.u.eventData.event = event;
+ msg.u.eventData.data1 = data1;
+ msg.u.eventData.data2 = data2;
+ ((OmxFrameDecoder *)appData)->mDispatcher.post(msg);
+ return OMX_ErrorNone;
+}
+
+//Static
+OMX_ERRORTYPE OmxFrameDecoder::emptyBufferDoneCallback(OMX_HANDLETYPE hComponent,
+ OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead) {
+ OmxMessage msg;
+ msg.type = OmxMessage::EMPTY_BUFFER_DONE;
+ msg.u.bufferData.appData = appData;
+ msg.u.bufferData.pBuffHead = pBuffHead;
+ ((OmxFrameDecoder *)appData)->mDispatcher.post(msg);
+ return OMX_ErrorNone;
+}
+
+//Static
+OMX_ERRORTYPE OmxFrameDecoder::fillBufferDoneCallback(OMX_HANDLETYPE hComponent,
+ OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead) {
+ OmxMessage msg;
+ msg.type = OmxMessage::FILL_BUFFER_DONE;
+ msg.u.bufferData.appData = appData;
+ msg.u.bufferData.pBuffHead = pBuffHead;
+ ((OmxFrameDecoder *)appData)->mDispatcher.post(msg);
+ return OMX_ErrorNone;
+}
+
+OmxFrameDecoder::OmxFrameDecoder(DecoderType type)
+ : mOmxInialized(false), mCurrentState(OmxDecoderState_Unloaded), mPreviousState(OmxDecoderState_Unloaded),
+ mStopping(false), mDecoderType(type) {
+}
+
+OmxFrameDecoder::~OmxFrameDecoder() {
+}
+
+OMX_ERRORTYPE OmxFrameDecoder::emptyBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead) {
+ android::AutoMutex lock(mHwLock);
+
+ int bufferIndex = reinterpret_cast<int>(pBuffHead->pAppPrivate);
+ CAMHAL_LOGD("Got header %p id = %d", pBuffHead, bufferIndex);
+ android::sp<MediaBuffer>& in = mInBuffers->editItemAt(bufferIndex);
+
+ android::AutoMutex itemLock(in->getLock());
+ in->setStatus((getOmxState() == OmxDecoderState_Executing) ? BufferStatus_InDecoded : BufferStatus_InQueued);
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE OmxFrameDecoder::fillBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead) {
+ android::AutoMutex lock(mHwLock);
+
+ int index = (int)pBuffHead->pAppPrivate;
+ android::sp<MediaBuffer>& out = mOutBuffers->editItemAt(index);
+
+ android::AutoMutex itemLock(out->getLock());
+ CameraBuffer* frame = static_cast<CameraBuffer*>(out->buffer);
+ out->setOffset(pBuffHead->nOffset);
+ out->setTimestamp(pBuffHead->nTimeStamp);
+ out->setStatus((getOmxState() == OmxDecoderState_Executing) ? BufferStatus_OutFilled : BufferStatus_OutQueued);
+
+ return OMX_ErrorNone;
+}
+
+OMX_ERRORTYPE OmxFrameDecoder::eventHandler(const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData) {
+
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE ret = OMX_ErrorNone;
+ android::AutoMutex lock(mHwLock);
+
+ switch(event) {
+
+ case OMX_EventCmdComplete:
+ {
+ if ((data1 == OMX_CommandStateSet) && (data2 == OMX_StateIdle)) {
+ CAMHAL_LOGD("Component State Changed To OMX_StateIdle\n");
+ commitState(OmxDecoderState_Idle);
+ mStateCondition.signal();
+ }
+ else if ((data1 == OMX_CommandStateSet) && (data2 == OMX_StateExecuting)) {
+ CAMHAL_LOGD("Component State Changed To OMX_StateExecuting\n");
+ commitState(OmxDecoderState_Executing);
+ mStateCondition.signal();
+ }
+ else if ((data1 == OMX_CommandStateSet) && (data2 == OMX_StateLoaded)) {
+ CAMHAL_LOGD("Component State Changed To OMX_StateLoaded\n");
+ if(getOmxState() == OmxDecoderState_Executing)
+ commitState(OmxDecoderState_Loaded);
+ mStateCondition.signal();
+ }
+ else if (data1 == OMX_CommandFlush) {
+ CAMHAL_LOGD("OMX_CommandFlush done on %d port\n", data2);
+ mStateCondition.signal();
+ }
+ else if (data1 == OMX_CommandPortDisable) {
+ CAMHAL_LOGD("OMX_CommandPortDisable done on %d port\n", data2);
+ mStateCondition.signal();
+ }
+ else if (data1 == OMX_CommandPortEnable) {
+ CAMHAL_LOGD("OMX_CommandPortEnable done on %d port\n", data2);
+ mStateCondition.signal();
+ } else {
+ CAMHAL_LOGD("Event %d done on %d port\n", data1, data2);
+ }
+ break;
+ }
+ case OMX_EventError:
+ {
+ CAMHAL_LOGD("\n\n\nOMX Component reported an Error!!!! 0x%x 0x%x\n\n\n", data1, data2);
+ commitState(OmxDecoderState_Error);
+ omxSendCommand(OMX_CommandStateSet, OMX_StateInvalid);
+ mStateCondition.signal();
+ break;
+ }
+ case OMX_EventPortSettingsChanged:
+ {
+ CAMHAL_LOGD("\n\n\nOMX_EventPortSettingsChanged(port=%ld, data2=0x%08lx)\n\n\n",
+ data1, data2);
+ if (data2 == 0) {
+ // This means that some serious change to port happens
+ commitState(OmxDecoderState_Reconfigure);
+ } else if (data2 == OMX_IndexConfigCommonOutputCrop) {
+#if 0
+ OMX_CONFIG_RECTTYPE rect;
+ InitOMXParams(&rect);
+ rect.nPortIndex = PortIndexOutput;
+ status_t ret = omxGetConfig(OMX_IndexConfigCommonOutputCrop, &rect);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't get new crop parameters 0x%x", ret);
+ break;
+ }
+
+ CAMHAL_LOGV("Crop should change to %d %d %d %d", rect.nLeft, rect.nTop, rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight);
+#endif
+ }
+ break;
+ }
+ default:
+ {
+ CAMHAL_LOGD("\n\n\nOMX unhandled event ID=0x%x!!!!\n\n\n", event);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return ret;
+ }
+
+void OmxFrameDecoder::doConfigure(const DecoderParameters& config) {
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t OmxFrameDecoder::enableGrallockHandles() {
+ OMX_TI_PARAMUSENATIVEBUFFER domxUseGrallocHandles;
+ InitOMXParams(&domxUseGrallocHandles);
+
+ domxUseGrallocHandles.nPortIndex = PortIndexOutput;
+ domxUseGrallocHandles.bEnable = OMX_TRUE;
+
+ return omxSetParameter((OMX_INDEXTYPE)OMX_TI_IndexUseNativeBuffers, &domxUseGrallocHandles);
+}
+
+status_t OmxFrameDecoder::omxSwitchToExecutingSync() {
+ CAMHAL_LOGV("Try set OMX_StateExecuting");
+ android::AutoMutex lock(mHwLock);
+ omxSendCommand(OMX_CommandStateSet, OMX_StateExecuting);
+ status_t ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to EXECUTING ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+}
+
+void OmxFrameDecoder::dumpPortSettings(PortType port) {
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = port;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ omxDumpPortSettings(def);
+}
+
+status_t OmxFrameDecoder::disablePortSync(int port) {
+ OMX_ERRORTYPE eError;
+ android::AutoMutex lock(mHwLock);
+ eError = OMX_SendCommand(mHandleComp, OMX_CommandPortDisable, port, NULL);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_CommandPortDisable OMX_ALL returned error 0x%x", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+ status_t ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_StateLoaded ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+}
+
+status_t OmxFrameDecoder::enablePortSync(int port) {
+ android::AutoMutex lock(mHwLock);
+ OMX_ERRORTYPE eError = OMX_SendCommand(mHandleComp, OMX_CommandPortEnable, port, NULL);
+ status_t ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_SendCommand OMX_CommandPortEnable OUT returned error 0x%x", eError);
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+ return NO_ERROR;
+}
+
+
+status_t OmxFrameDecoder::doPortReconfigure() {
+ OMX_ERRORTYPE eError;
+ status_t ret = NO_ERROR;
+
+ CAMHAL_LOGD("Starting port reconfiguration !");
+ dumpPortSettings(PortIndexInput);
+ dumpPortSettings(PortIndexOutput);
+
+ android::AutoMutex lock(mHwLock);
+
+ omxSendCommand(OMX_CommandFlush, PortIndexOutput);
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_CommandFlush ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ omxSendCommand(OMX_CommandFlush, PortIndexInput);
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_CommandFlush ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ ret = omxSendCommand(OMX_CommandPortDisable, PortIndexOutput);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("OMX_CommandPortDisable PortIndexOutput returned error 0x%x", ret);
+ return ret;
+ }
+
+ freeBuffersOnOutput();
+
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_StateLoaded ERROR 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexOutput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ def.nBufferCountActual = mParams.outputBufferCount;
+ CAMHAL_LOGD("Will set def.nBufferSize=%d stride=%d height=%d", def.nBufferSize , def.format.video.nStride, def.format.video.nFrameHeight);
+ omxSetParameter(OMX_IndexParamPortDefinition, &def);
+
+
+
+ ret = omxSendCommand(OMX_CommandPortEnable, PortIndexOutput);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("omxSendCommand OMX_CommandPortEnable returned error 0x%x", ret);
+ return ret;
+ }
+
+ allocateBuffersOutput();
+
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("omxSendCommand OMX_CommandPortEnable timeout 0x%x", ret);
+ return UNKNOWN_ERROR;
+ }
+
+ CAMHAL_LOGD("Port reconfiguration DONE!");
+ //dumpPortSettings(PortIndexOutput);
+
+ return NO_ERROR;
+}
+
+void OmxFrameDecoder::queueOutputBuffers() {
+
+ LOG_FUNCTION_NAME;
+
+ android::GraphicBufferMapper &mapper = android::GraphicBufferMapper::get();
+
+ for (size_t i = 0; i < mOutQueue.size(); i++) {
+ int index = mOutQueue[i];
+ android::sp<MediaBuffer> &outBuffer = mOutBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(outBuffer->getLock());
+ if (outBuffer->getStatus() == BufferStatus_OutQueued) {
+ outBuffer->setStatus(BufferStatus_OutWaitForFill);
+ CameraBuffer* frame = static_cast<CameraBuffer*>(outBuffer->buffer);
+ OMX_BUFFERHEADERTYPE *pOutBufHdr = mOutBufferHeaders[outBuffer->bufferId];
+ CAMHAL_LOGV("Fill this buffer cf=%p bh=%p id=%d", frame, pOutBufHdr, outBuffer->bufferId);
+ status_t status = omxFillThisBuffer(pOutBufHdr);
+ CAMHAL_ASSERT(status == NO_ERROR);
+ }
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::doProcessInputBuffer() {
+
+ LOG_FUNCTION_NAME;
+
+ if (getOmxState() == OmxDecoderState_Reconfigure) {
+ if (doPortReconfigure() == NO_ERROR) {
+ commitState(OmxDecoderState_Executing);
+ queueOutputBuffers();
+ } else {
+ commitState(OmxDecoderState_Error);
+ return;
+ }
+
+ }
+
+ if (getOmxState() == OmxDecoderState_Idle) {
+ CAMHAL_ASSERT(omxSwitchToExecutingSync() == NO_ERROR);
+ queueOutputBuffers();
+ }
+
+ if (getOmxState() == OmxDecoderState_Executing) {
+ for (size_t i = 0; i < mInQueue.size(); i++) {
+ int index = mInQueue[i];
+ CAMHAL_LOGD("Got in inqueue[%d] buffer id=%d", i, index);
+ android::sp<MediaBuffer> &inBuffer = mInBuffers->editItemAt(index);
+ android::AutoMutex bufferLock(inBuffer->getLock());
+ if (inBuffer->getStatus() == BufferStatus_InQueued) {
+ OMX_BUFFERHEADERTYPE *pInBufHdr = mInBufferHeaders[index];
+ inBuffer->setStatus(BufferStatus_InWaitForEmpty);
+ omxEmptyThisBuffer(inBuffer, pInBufHdr);
+ }
+ }
+ queueOutputBuffers();
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+status_t OmxFrameDecoder::omxInit() {
+
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_Init();
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGEB("OMX_Init() failed, error: 0x%x", eError);
+ }
+ else mOmxInialized = true;
+
+ LOG_FUNCTION_NAME_EXIT;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxFillThisBuffer(OMX_BUFFERHEADERTYPE *pOutBufHdr) {
+ OMX_ERRORTYPE eError = OMX_ErrorUndefined;
+
+ pOutBufHdr->nFilledLen = 0;
+ pOutBufHdr->nOffset = 0;
+ pOutBufHdr->nFlags = 0;
+
+ eError = OMX_FillThisBuffer(mHandleComp, pOutBufHdr);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_FillThisBuffer ERROR 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+
+status_t OmxFrameDecoder::omxGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData,
+ OMX_CALLBACKTYPE & callbacks) {
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorUndefined;
+
+ eError = OMX_GetHandle(handle, (OMX_STRING)"OMX.TI.DUCATI1.VIDEO.DECODER", pAppData, &callbacks);
+ if((eError != OMX_ErrorNone) || (handle == NULL)) {
+ handle = NULL;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+ commitState(OmxDecoderState_Loaded);
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+
+status_t OmxFrameDecoder::omxEmptyThisBuffer(android::sp<MediaBuffer>& inBuffer, OMX_BUFFERHEADERTYPE *pInBufHdr) {
+
+ LOG_FUNCTION_NAME;
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexInput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ CAMHAL_LOGD("Found id for empty is %d ", inBuffer->bufferId);
+ if (inBuffer->filledLen > def.nBufferSize) {
+ CAMHAL_LOGE("Can't copy IN buffer: size %d is smaller than needed %d", def.nBufferSize, inBuffer->filledLen);
+ return UNKNOWN_ERROR;
+ }
+ memcpy(pInBufHdr->pBuffer, reinterpret_cast<unsigned char*>(inBuffer->buffer), inBuffer->filledLen);
+ CAMHAL_LOGV("Copied %d bytes into In buffer with bh=%p", inBuffer->filledLen, pInBufHdr);
+ CAMHAL_LOGV("Empty this buffer id=%d timestamp %lld offset=%d", inBuffer->bufferId, pInBufHdr->nTimeStamp, pInBufHdr->nOffset);
+ pInBufHdr->nFilledLen = inBuffer->filledLen;
+ pInBufHdr->nTimeStamp = inBuffer->getTimestamp();
+ pInBufHdr->nFlags = 16;
+ pInBufHdr->nOffset = 0;
+ eError = OMX_EmptyThisBuffer(mHandleComp, pInBufHdr);
+ if (eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_EmptyThisBuffer ERROR 0x%x", eError);
+ Utils::ErrorUtils::omxToAndroidError(eError);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+
+ return NO_ERROR;
+}
+
+
+status_t OmxFrameDecoder::allocateBuffersOutput() {
+ LOG_FUNCTION_NAME;
+
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexOutput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ def.nBufferCountActual = mParams.outputBufferCount;
+
+ CAMHAL_LOGD("Will set def.nBufferSize=%d stride=%d height=%d", def.nBufferSize , def.format.video.nStride, def.format.video.nFrameHeight);
+
+ OMX_BUFFERHEADERTYPE *pOutBufHdr;
+ mOutBufferHeaders.clear();
+ for (size_t i = 0; i < mOutBuffers->size(); i++) {
+ android::sp<MediaBuffer>& outBuffer = mOutBuffers->editItemAt(i);
+ android::AutoMutex lock(outBuffer->getLock());
+ CameraBuffer* cb = static_cast<CameraBuffer*>(outBuffer->buffer);
+ OMX_U8 * outPtr = static_cast<OMX_U8*>(camera_buffer_get_omx_ptr(cb));
+ CAMHAL_LOGV("Try to set OMX_UseBuffer [0x%x] for output port with length %d ", outPtr, def.nBufferSize);
+ eError = OMX_UseBuffer(mHandleComp, &pOutBufHdr, PortIndexOutput, (void*)i, def.nBufferSize, outPtr);
+
+ if (eError != OMX_ErrorNone) {
+ ALOGE("OMX_UseBuffer failed with error %d (0x%08x)", eError, eError);
+ commitState(OmxDecoderState_Error);
+ return UNKNOWN_ERROR;
+ }
+
+ CAMHAL_LOGD("Got buffer header %p", pOutBufHdr);
+ mOutBufferHeaders.add(pOutBufHdr);
+ }
+
+ omxDumpPortSettings(def);
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+
+}
+
+status_t OmxFrameDecoder::allocateBuffersInput() {
+ LOG_FUNCTION_NAME;
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ OMX_BUFFERHEADERTYPE *pInBufHdr;
+ OMX_ERRORTYPE eError = OMX_ErrorNone;
+
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexInput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+
+ // TODO: Will be changed since port reconfiguration will be handled
+ def.nBufferCountActual = mInBuffers->size();
+ def.bEnabled = OMX_TRUE;
+ omxSetParameter(OMX_IndexParamPortDefinition, &def);
+
+ mInBufferHeaders.clear();
+
+ for (size_t i = 0; i < mInBuffers->size(); i++) {
+ CAMHAL_LOGD("Will do OMX_AllocateBuffer for input port with size %d id=%d", def.nBufferSize, i);
+ eError = OMX_AllocateBuffer(mHandleComp, &pInBufHdr, PortIndexInput, (void*)i, def.nBufferSize);
+ if (eError != OMX_ErrorNone) {
+ ALOGE("OMX_AllocateBuffer failed with error %d (0x%08x)", eError, eError);
+ commitState(OmxDecoderState_Error);
+ return UNKNOWN_ERROR;
+ }
+ CAMHAL_LOGD("Got new buffer header [%p] for IN port", pInBufHdr);
+ mInBufferHeaders.push_back(pInBufHdr);
+ }
+
+ LOG_FUNCTION_NAME_EXIT;
+ return NO_ERROR;
+}
+
+status_t OmxFrameDecoder::getAndConfigureDecoder() {
+ status_t ret = NO_ERROR;
+ OMX_ERRORTYPE eError;
+
+ ret = omxInit();
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("OMX_Init returned error 0x%x", ret);
+ return ret;
+ }
+ OMX_CALLBACKTYPE callbacks;
+ callbacks.EventHandler = OmxFrameDecoder::eventCallback;
+ callbacks.EmptyBufferDone = OmxFrameDecoder::emptyBufferDoneCallback;
+ callbacks.FillBufferDone = OmxFrameDecoder::fillBufferDoneCallback;
+ ret = omxGetHandle(&mHandleComp, this, callbacks);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("OMX_GetHandle returned error 0x%x", ret);
+ OMX_Deinit();
+ mOmxInialized = false;
+ return ret;
+ }
+ ret = setComponentRole();
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("setComponentRole returned error 0x%x", ret);
+ OMX_Deinit();
+ mOmxInialized = false;
+ return ret;
+ }
+ disablePortSync(PortIndexOutput);
+ ret = setVideoOutputFormat(mParams.width, mParams.height);
+ enablePortSync(PortIndexOutput);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't set output format error 0x%x", ret);
+ OMX_Deinit();
+ mOmxInialized = false;
+ return ret;
+ }
+ enableGrallockHandles();
+ return NO_ERROR;
+}
+
+status_t OmxFrameDecoder::switchToIdle() {
+ CAMHAL_ASSERT(getOmxState() == OmxDecoderState_Loaded);
+ CAMHAL_LOGD("Try set OMX_StateIdle");
+ android::AutoMutex lock(mHwLock);
+ status_t ret = omxSendCommand(OMX_CommandStateSet, OMX_StateIdle);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't omxSendCommandt error 0x%x", ret);
+ OMX_Deinit();
+ mOmxInialized = false;
+ return ret;
+ }
+
+ allocateBuffersInput();
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexOutput;
+ omxGetParameter(OMX_IndexParamPortDefinition, &def);
+ def.nBufferCountActual = mParams.outputBufferCount;
+ omxSetParameter(OMX_IndexParamPortDefinition, &def);
+
+ allocateBuffersOutput();
+
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to IDLE ERROR 0x%x", ret);
+ return ret;
+ }
+ commitState(OmxDecoderState_Idle);
+ return NO_ERROR;
+}
+
+status_t OmxFrameDecoder::doStart() {
+ LOG_FUNCTION_NAME;
+
+ status_t ret = NO_ERROR;
+ mStopping = false;
+ OMX_ERRORTYPE eError;
+
+ ret = getAndConfigureDecoder();
+
+#if 0
+ OMX_TI_PARAM_ENHANCEDPORTRECONFIG tParamStruct;
+ tParamStruct.nSize = sizeof(OMX_TI_PARAM_ENHANCEDPORTRECONFIG);
+ tParamStruct.nVersion.s.nVersionMajor = 0x1;
+ tParamStruct.nVersion.s.nVersionMinor = 0x1;
+ tParamStruct.nVersion.s.nRevision = 0x0;
+ tParamStruct.nVersion.s.nStep = 0x0;
+ tParamStruct.nPortIndex = PortIndexOutput;
+ tParamStruct.bUsePortReconfigForCrop = OMX_TRUE;
+ tParamStruct.bUsePortReconfigForPadding = OMX_FALSE;
+ omxSetParameter((OMX_INDEXTYPE)OMX_TI_IndexParamUseEnhancedPortReconfig, &tParamStruct);
+#endif
+
+ // Transition to IDLE
+ ret = switchToIdle();
+ dumpPortSettings(PortIndexInput);
+ dumpPortSettings(PortIndexOutput);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return ret;
+}
+
+status_t OmxFrameDecoder::omxGetParameter(OMX_INDEXTYPE index, OMX_PTR ptr) {
+ OMX_ERRORTYPE eError = OMX_GetParameter(mHandleComp, index, ptr);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_GetParameter - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxGetConfig(OMX_INDEXTYPE index, OMX_PTR ptr) {
+ OMX_ERRORTYPE eError = OMX_GetConfig(mHandleComp, index, ptr);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_GetConfig - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxSetParameter(OMX_INDEXTYPE index, OMX_PTR ptr) {
+ OMX_ERRORTYPE eError = OMX_SetParameter(mHandleComp, index, ptr);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_SetParameter - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxSetConfig(OMX_INDEXTYPE index, OMX_PTR ptr) {
+ OMX_ERRORTYPE eError = OMX_SetConfig(mHandleComp, index, ptr);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_SetConfig - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::omxSendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param) {
+ OMX_ERRORTYPE eError = OMX_SendCommand(mHandleComp, cmd, param, NULL);
+ if(eError != OMX_ErrorNone) {
+ CAMHAL_LOGE("OMX_SendCommand - error 0x%x", eError);
+ }
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::setVideoOutputFormat(OMX_U32 width, OMX_U32 height) {
+ LOG_FUNCTION_NAME;
+
+ CAMHAL_LOGV("setVideoOutputFormat width=%ld, height=%ld", width, height);
+
+ OMX_VIDEO_CODINGTYPE compressionFormat = gCompressionFormat[mDecoderType];
+
+ status_t err = setVideoPortFormatType(
+ PortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
+
+ if (err != NO_ERROR) {
+ CAMHAL_LOGE("Error during setVideoPortFormatType 0x%x", err);
+ return err;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE def;
+ InitOMXParams(&def);
+ def.nPortIndex = PortIndexInput;
+
+ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
+
+ err = omxGetParameter(OMX_IndexParamPortDefinition, &def);
+
+ if (err != NO_ERROR) {
+ return err;
+ }
+
+ video_def->nFrameWidth = width;
+ video_def->nFrameHeight = height;
+
+ video_def->eCompressionFormat = compressionFormat;
+ video_def->eColorFormat = OMX_COLOR_FormatUnused;
+
+
+ err = omxSetParameter(OMX_IndexParamPortDefinition, &def);
+
+
+ if (err != OK) {
+ return err;
+ }
+
+ OMX_PARAM_PORTDEFINITIONTYPE odef;
+ OMX_VIDEO_PORTDEFINITIONTYPE *out_video_def = &odef.format.video;
+
+ InitOMXParams(&odef);
+ odef.nPortIndex = PortIndexOutput;
+
+ err = omxGetParameter(OMX_IndexParamPortDefinition, &odef);
+ if (err != NO_ERROR) {
+ return err;
+ }
+
+ out_video_def->nFrameWidth = width;
+ out_video_def->nFrameHeight = height;
+ out_video_def->xFramerate = 30<< 16;//((width >= 720) ? 60 : 30) << 16;
+ out_video_def->nStride = 4096;
+
+ err = omxSetParameter(OMX_IndexParamPortDefinition, &odef);
+ CAMHAL_LOGD("OUT port is configured");
+ dumpPortSettings(PortIndexOutput);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return err;
+}
+
+status_t OmxFrameDecoder::setVideoPortFormatType(
+ OMX_U32 portIndex,
+ OMX_VIDEO_CODINGTYPE compressionFormat,
+ OMX_COLOR_FORMATTYPE colorFormat) {
+
+ LOG_FUNCTION_NAME;
+
+ OMX_VIDEO_PARAM_PORTFORMATTYPE format;
+ InitOMXParams(&format);
+ format.nPortIndex = portIndex;
+ format.nIndex = 0;
+ bool found = false;
+
+ OMX_U32 index = 0;
+ for (;;) {
+ CAMHAL_LOGV("Will check index = %d", index);
+ format.nIndex = index;
+ OMX_ERRORTYPE eError = OMX_GetParameter(
+ mHandleComp, OMX_IndexParamVideoPortFormat,
+ &format);
+
+ CAMHAL_LOGV("format.eCompressionFormat=0x%x format.eColorFormat=0x%x", format.eCompressionFormat, format.eColorFormat);
+
+ if (format.eCompressionFormat == compressionFormat
+ && format.eColorFormat == colorFormat) {
+ found = true;
+ break;
+ }
+
+ ++index;
+ if (index >= kMaxColorFormatSupported) {
+ CAMHAL_LOGE("color format %d or compression format %d is not supported",
+ colorFormat, compressionFormat);
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ if (!found) {
+ return UNKNOWN_ERROR;
+ }
+
+ CAMHAL_LOGV("found a match.");
+ OMX_ERRORTYPE eError = OMX_SetParameter(
+ mHandleComp, OMX_IndexParamVideoPortFormat,
+ &format);
+
+ LOG_FUNCTION_NAME_EXIT;
+ return Utils::ErrorUtils::omxToAndroidError(eError);
+}
+
+status_t OmxFrameDecoder::setComponentRole() {
+ OMX_PARAM_COMPONENTROLETYPE roleParams;
+ const char *role = gDecoderRole[mDecoderType];
+ InitOMXParams(&roleParams);
+
+ strncpy((char *)roleParams.cRole,
+ role, OMX_MAX_STRINGNAME_SIZE - 1);
+ roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
+
+ return omxSetParameter(OMX_IndexParamStandardComponentRole, &roleParams);
+}
+
+void OmxFrameDecoder::freeBuffersOnOutput() {
+ LOG_FUNCTION_NAME;
+ for (size_t i = 0; i < mOutBufferHeaders.size(); i++) {
+ OMX_BUFFERHEADERTYPE* header = mOutBufferHeaders[i];
+ CAMHAL_LOGD("Freeing OUT buffer header %p", header);
+ OMX_FreeBuffer(mHandleComp, PortIndexOutput, header);
+ }
+ mOutBufferHeaders.clear();
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::freeBuffersOnInput() {
+ LOG_FUNCTION_NAME;
+ for (size_t i = 0; i < mInBufferHeaders.size(); i++) {
+ OMX_BUFFERHEADERTYPE* header = mInBufferHeaders[i];
+ CAMHAL_LOGD("Freeing IN buffer header %p", header);
+ OMX_FreeBuffer(mHandleComp, PortIndexInput, header);
+ }
+ mInBufferHeaders.clear();
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::doStop() {
+ LOG_FUNCTION_NAME;
+
+ mStopping = true;
+ android::AutoMutex lock(mHwLock);
+
+ CAMHAL_LOGD("HwFrameDecoder::doStop state id=%d", getOmxState());
+
+ if ((getOmxState() == OmxDecoderState_Executing) || (getOmxState() == OmxDecoderState_Reconfigure)) {
+
+ CAMHAL_LOGD("Try set OMX_StateIdle");
+ status_t ret = omxSendCommand(OMX_CommandStateSet, OMX_StateIdle);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't omxSendCommandt error 0x%x", ret);
+ }
+
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to IDLE ERROR 0x%x", ret);
+ }
+ commitState(OmxDecoderState_Idle);
+ }
+
+ if (getOmxState() == OmxDecoderState_Idle) {
+
+ CAMHAL_LOGD("Try set OMX_StateLoaded");
+ status_t ret = omxSendCommand(OMX_CommandStateSet, OMX_StateLoaded);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("Can't omxSendCommandt error 0x%x", ret);
+ return;
+ }
+ freeBuffersOnOutput();
+ freeBuffersOnInput();
+ ret = mStateCondition.waitRelative(mHwLock, kMaxStateSwitchTimeOut);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGE("State transition to OMX_StateLoaded ERROR 0x%x", ret);
+ }
+ commitState(OmxDecoderState_Loaded);
+
+ }
+
+ if (getOmxState() == OmxDecoderState_Error) {
+ CAMHAL_LOGD("In state ERROR will try to free buffers!");
+ freeBuffersOnOutput();
+ freeBuffersOnInput();
+ }
+
+ CAMHAL_LOGD("Before OMX_FreeHandle ....");
+ OMX_FreeHandle(mHandleComp);
+ CAMHAL_LOGD("After OMX_FreeHandle ....");
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::doFlush() {
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::doRelease() {
+ LOG_FUNCTION_NAME;
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+void OmxFrameDecoder::omxDumpPortSettings(OMX_PARAM_PORTDEFINITIONTYPE& def) {
+ CAMHAL_LOGD("----------Port settings start--------------------");
+ CAMHAL_LOGD("nSize=%d nPortIndex=%d eDir=%d nBufferCountActual=%d", def.nSize, def.nPortIndex, def.eDir, def.nBufferCountActual);
+ CAMHAL_LOGD("nBufferCountMin=%d nBufferSize=%d bEnabled=%d bPopulated=%d bBuffersContiguous=%d nBufferAlignment=%d", def.nBufferCountMin, def.nBufferSize, def.bEnabled, def.bPopulated, def.bBuffersContiguous, def.nBufferAlignment);
+
+ CAMHAL_LOGD("eDomain = %d",def.eDomain);
+
+ if (def.eDomain == OMX_PortDomainVideo) {
+ CAMHAL_LOGD("===============Video Port===================");
+ CAMHAL_LOGD("cMIMEType=%s",def.format.video.cMIMEType);
+ CAMHAL_LOGD("nFrameWidth=%d nFrameHeight=%d", def.format.video.nFrameWidth, def.format.video.nFrameHeight);
+ CAMHAL_LOGD("nStride=%d nSliceHeight=%d", def.format.video.nStride, def.format.video.nSliceHeight);
+ CAMHAL_LOGD("nBitrate=%d xFramerate=%d", def.format.video.nBitrate, def.format.video.xFramerate>>16);
+ CAMHAL_LOGD("bFlagErrorConcealment=%d eCompressionFormat=%d", def.format.video.bFlagErrorConcealment, def.format.video.eCompressionFormat);
+ CAMHAL_LOGD("eColorFormat=0x%x pNativeWindow=%p", def.format.video.eColorFormat, def.format.video.pNativeWindow);
+ CAMHAL_LOGD("===============END Video Part===================");
+ }
+ else if (def.eDomain == OMX_PortDomainImage) {
+ CAMHAL_LOGD("===============Image Port===================");
+ CAMHAL_LOGD("cMIMEType=%s",def.format.image.cMIMEType);
+ CAMHAL_LOGD("nFrameWidth=%d nFrameHeight=%d", def.format.image.nFrameWidth, def.format.image.nFrameHeight);
+ CAMHAL_LOGD("nStride=%d nSliceHeight=%d", def.format.image.nStride, def.format.image.nSliceHeight);
+ CAMHAL_LOGD("bFlagErrorConcealment=%d eCompressionFormat=%d", def.format.image.bFlagErrorConcealment, def.format.image.eCompressionFormat);
+ CAMHAL_LOGD("eColorFormat=0x%x pNativeWindow=%p", def.format.image.eColorFormat, def.format.image.pNativeWindow);
+ CAMHAL_LOGD("===============END Image Part===================");
+ }
+ CAMHAL_LOGD("----------Port settings end--------------------");
+}
+
+void OmxFrameDecoder::omxDumpBufferHeader(OMX_BUFFERHEADERTYPE* bh) {
+ CAMHAL_LOGD("==============OMX_BUFFERHEADERTYPE start==============");
+ CAMHAL_LOGD("nAllocLen=%d nFilledLen=%d nOffset=%d nFlags=0x%x", bh->nAllocLen, bh->nFilledLen, bh->nOffset, bh->nFlags);
+ CAMHAL_LOGD("pBuffer=%p nOutputPortIndex=%d nInputPortIndex=%d nSize=0x%x", bh->pBuffer, bh->nOutputPortIndex, bh->nInputPortIndex, bh->nSize);
+ CAMHAL_LOGD("nVersion=0x%x", bh->nVersion);
+ CAMHAL_LOGD("==============OMX_BUFFERHEADERTYPE end==============");
+}
+
+bool OmxFrameDecoder::getPaddedDimensions(size_t &width, size_t &height) {
+
+ switch (height) {
+
+ case 480: {
+ height = 576;
+ if (width == 640) {
+ width = 768;
+ }
+ break;
+ }
+ case 720: {
+ height = 832;
+ if (width == 1280) {
+ width = 1408;
+ }
+ break;
+ }
+ case 1080: {
+ height = 1184;
+ if (width == 1920) {
+ width = 2048;
+ }
+ break;
+ }
+
+ }
+
+ CAMHAL_LOGE("WxH updated to padded values : %d x %d", width, height);
+ return true;
+}
+
+} // namespace Camera
+} // namespace Ti
+
diff --git a/camera/SwFrameDecoder.cpp b/camera/SwFrameDecoder.cpp
new file mode 100644
index 0000000..2ce2c0f
--- /dev/null
+++ b/camera/SwFrameDecoder.cpp
@@ -0,0 +1,85 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "Common.h"
+#include "SwFrameDecoder.h"
+
+namespace Ti {
+namespace Camera {
+
+SwFrameDecoder::SwFrameDecoder()
+: mjpegWithHdrSize(0), mJpegWithHeaderBuffer(NULL) {
+}
+
+SwFrameDecoder::~SwFrameDecoder() {
+ delete [] mJpegWithHeaderBuffer;
+ mJpegWithHeaderBuffer = NULL;
+}
+
+
+void SwFrameDecoder::doConfigure(const DecoderParameters& params) {
+ LOG_FUNCTION_NAME;
+
+ mjpegWithHdrSize = (mParams.width * mParams.height / 2) +
+ mJpgdecoder.readDHTSize();
+ if (mJpegWithHeaderBuffer != NULL) {
+ delete [] mJpegWithHeaderBuffer;
+ mJpegWithHeaderBuffer = NULL;
+ }
+ mJpegWithHeaderBuffer = new unsigned char[mjpegWithHdrSize];
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+void SwFrameDecoder::doProcessInputBuffer() {
+ LOG_FUNCTION_NAME;
+ nsecs_t timestamp = 0;
+
+ CAMHAL_LOGV("Will add header to MJPEG");
+ int final_jpg_sz = 0;
+ {
+ int inIndex = mInQueue.itemAt(0);
+ android::sp<MediaBuffer>& inBuffer = mInBuffers->editItemAt(inIndex);
+ android::AutoMutex lock(inBuffer->getLock());
+ timestamp = inBuffer->getTimestamp();
+ final_jpg_sz = mJpgdecoder.appendDHT(
+ reinterpret_cast<unsigned char*>(inBuffer->buffer),
+ inBuffer->filledLen, mJpegWithHeaderBuffer, mjpegWithHdrSize);
+ inBuffer->setStatus(BufferStatus_InDecoded);
+ }
+ CAMHAL_LOGV("Added header to MJPEG");
+ {
+ int outIndex = mOutQueue.itemAt(0);
+ android::sp<MediaBuffer>& outBuffer = mOutBuffers->editItemAt(outIndex);
+ android::AutoMutex lock(outBuffer->getLock());
+ CameraBuffer* buffer = reinterpret_cast<CameraBuffer*>(outBuffer->buffer);
+ if (!mJpgdecoder.decode(mJpegWithHeaderBuffer, final_jpg_sz,
+ reinterpret_cast<unsigned char*>(buffer->mapped), 4096)) {
+ CAMHAL_LOGEA("Error while decoding JPEG");
+ return;
+ }
+ outBuffer->setTimestamp(timestamp);
+ outBuffer->setStatus(BufferStatus_OutFilled);
+ }
+ CAMHAL_LOGV("JPEG decoded!");
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
+
+} // namespace Camera
+} // namespace Ti
diff --git a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
index e22d33b..3ba0fa9 100755
--- a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
+++ b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -37,11 +37,10 @@
#include <sys/mman.h>
#include <sys/select.h>
#include <linux/videodev.h>
+#include <cutils/properties.h>
-#include <ui/GraphicBuffer.h>
-#include <ui/GraphicBufferMapper.h>
+#include "DecoderFactory.h"
-#include <cutils/properties.h>
#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
static int mDebugFps = 0;
@@ -55,11 +54,6 @@ namespace Camera {
//frames skipped before recalculating the framerate
#define FPS_PERIOD 30
-//define this macro to save first few raw frames when starting the preview.
-//#define SAVE_RAW_FRAMES 1
-//#define DUMP_CAPTURE_FRAME 1
-//#define PPM_PER_FRAME_CONVERSION 1
-
//Proto Types
static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size );
static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height );
@@ -68,14 +62,40 @@ static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int wid
android::Mutex gV4LAdapterLock;
char device[15];
+static void debugShowFPS()
+{
+ static int mFrameCount = 0;
+ static int mLastFrameCount = 0;
+ static nsecs_t mLastFpsTime = 0;
+ static float mFps = 0;
+ if(mDebugFps) {
+ mFrameCount++;
+ if ((mFrameCount % 30 == 0)) {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ CAMHAL_LOGE("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ }
+ }
+}
+
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
/*--------------------V4L wrapper functions -------------------------------*/
+
+bool V4LCameraAdapter::isNeedToUseDecoder() const {
+ return mPixelFormat != V4L2_PIX_FMT_YUYV;
+}
+
status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
status_t ret = NO_ERROR;
errno = 0;
+ android::AutoMutex lock(mV4LLock);
+
do {
ret = ioctl (fd, req, argp);
}while (-1 == ret && EINTR == errno);
@@ -86,9 +106,10 @@ status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
status_t V4LCameraAdapter::v4lInitMmap(int& count) {
status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
int width, height;
mParams.getPreviewSize(&width, &height);
- jpeg_with_dht_buffer_size = (width * height / 2) + jpgdecoder.readDHTSize();
//First allocate adapter internal buffers at V4L level for USB Cam
//These are the buffers from which we will copy the data into overlay buffers
@@ -104,6 +125,10 @@ status_t V4LCameraAdapter::v4lInitMmap(int& count) {
}
count = mVideoInfo->rb.count;
+
+ //Since we will do mapping of new In buffers - clear input MediaBuffer storage
+ mInBuffers.clear();
+
for (int i = 0; i < count; i++) {
memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
@@ -131,12 +156,25 @@ status_t V4LCameraAdapter::v4lInitMmap(int& count) {
return -1;
}
- if (jpeg_with_dht_buffer[i] != NULL){
- free(jpeg_with_dht_buffer[i]);
- jpeg_with_dht_buffer[i] = NULL;
- }
- jpeg_with_dht_buffer[i] = (unsigned char *)malloc(jpeg_with_dht_buffer_size);
+ MediaBuffer* buffer = new MediaBuffer(i, mVideoInfo->mem[i], mVideoInfo->buf.length);
+ mInBuffers.push_back(buffer);
+ }
+
+ if (isNeedToUseDecoder()) {
+ mDecoder->registerInputBuffers(&mInBuffers);
+ DecoderParameters params;
+ params.width = width;
+ params.height = height;
+ params.inputBufferCount = count;
+ params.outputBufferCount = count;
+ mDecoder->configure(params);
}
+
+
+
+
+ LOG_FUNCTION_NAME_EXIT;
+
return ret;
}
@@ -161,6 +199,8 @@ status_t V4LCameraAdapter::v4lStartStreaming () {
status_t ret = NO_ERROR;
enum v4l2_buf_type bufType;
+ LOG_FUNCTION_NAME;
+
if (!mVideoInfo->isStreaming) {
bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = applyFpsValue();
@@ -174,6 +214,8 @@ status_t V4LCameraAdapter::v4lStartStreaming () {
}
mVideoInfo->isStreaming = true;
}
+
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -181,6 +223,8 @@ status_t V4LCameraAdapter::v4lStopStreaming (int nBufferCount) {
status_t ret = NO_ERROR;
enum v4l2_buf_type bufType;
+ LOG_FUNCTION_NAME;
+
if (mVideoInfo->isStreaming) {
bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
@@ -212,12 +256,15 @@ status_t V4LCameraAdapter::v4lStopStreaming (int nBufferCount) {
}
}
EXIT:
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_format) {
status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
if (ret < 0) {
@@ -241,6 +288,9 @@ status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_for
}
v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
CAMHAL_LOGDB("VIDIOC_G_FMT : WxH = %dx%d", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height);
+ CAMHAL_LOGD("### Using: WxH = %dx%d pixelformat=0x%x ", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height, mVideoInfo->format.fmt.pix.pixelformat);
+ CAMHAL_LOGD("### Using: bytesperline=%d sizeimage=%d colorspace=0x%x", mVideoInfo->format.fmt.pix.bytesperline, mVideoInfo->format.fmt.pix.sizeimage, mVideoInfo->format.fmt.pix.colorspace);
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -251,10 +301,12 @@ status_t V4LCameraAdapter::restartPreview ()
int height = 0;
struct v4l2_streamparm streamParams;
+ LOG_FUNCTION_NAME;
+
//configure for preview size and pixel format.
mParams.getPreviewSize(&width, &height);
- ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
+ ret = v4lSetFormat (width, height, mPixelFormat);
if (ret < 0) {
CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
goto EXIT;
@@ -284,6 +336,7 @@ status_t V4LCameraAdapter::restartPreview ()
ret = v4lStartStreaming();
CAMHAL_LOGDA("Ready for preview....");
EXIT:
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -347,6 +400,7 @@ status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::F
{
status_t ret = NO_ERROR;
int idx = 0;
+
LOG_FUNCTION_NAME;
android::AutoMutex lock(mLock);
@@ -359,10 +413,11 @@ status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::F
mEndImageCaptureCallback(mEndCaptureData);
mLock.lock();
}
- goto EXIT;
+ return ret;
}
+
if ( !mVideoInfo->isStreaming ) {
- goto EXIT;
+ return ret;
}
for (int xx = 0; xx < NB_BUFFER; xx++){
@@ -373,24 +428,44 @@ status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::F
}
if (idx == NB_BUFFER){
CAMHAL_LOGEB("Wrong index = %d. What do i do? What do i do?",idx);
- goto EXIT;
+ return ret;
}
if(idx < 0) {
CAMHAL_LOGEB("Wrong index = %d",idx);
- goto EXIT;
+ return ret;
}
+ if (isNeedToUseDecoder()) {
+ for (int i = 0; i < mOutBuffers.size(); i++) {
+ android::sp<MediaBuffer>& outBuffer = mOutBuffers.editItemAt(i);
+ CameraBuffer* buffer = static_cast<CameraBuffer*>(outBuffer->buffer);
+ if (buffer == frameBuf) {
+ mDecoder->queueOutputBuffer(outBuffer->bufferId);
+ break;
+ }
+ }
- v4l2_buffer buf;
- buf.index = idx;
- buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- buf.memory = V4L2_MEMORY_MMAP;
+ int inIndex = -1;
+ ret = mDecoder->dequeueInputBuffer(inIndex);
- ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
- if (ret < 0) {
- CAMHAL_LOGEA("VIDIOC_QBUF Failed");
- goto EXIT;
+ if (ret == NO_ERROR) {
+ ret = returnBufferToV4L(inIndex);
+ }
+
+ } else {
+ v4l2_buffer buf;
+ buf.index = idx;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ CAMHAL_LOGD("Will return buffer to V4L with id=%d", idx);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
+ }
+
+ nQueued++;
}
- nQueued++;
+
EXIT:
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -427,9 +502,8 @@ status_t V4LCameraAdapter::setParameters(const android::CameraParameters &params
if(!mPreviewing && !mCapturing) {
params.getPreviewSize(&width, &height);
- CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);
-
- ret = v4lSetFormat( width, height, DEFAULT_PIXEL_FORMAT);
+ CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, mPixelFormat);
+ ret = v4lSetFormat( width, height, mPixelFormat);
if (ret < 0) {
CAMHAL_LOGEB(" VIDIOC_S_FMT Failed: %s", strerror(errno));
goto EXIT;
@@ -548,13 +622,20 @@ status_t V4LCameraAdapter::UseBuffersPreview(CameraBuffer *bufArr, int num)
}
ret = v4lInitMmap(num);
+
+ mOutBuffers.clear();
+
if (ret == NO_ERROR) {
for (int i = 0; i < num; i++) {
//Associate each Camera internal buffer with the one from Overlay
mPreviewBufs[i] = &bufArr[i];
- CAMHAL_LOGDB("Preview- buff [%d] = 0x%x ",i, mPreviewBufs[i]);
+ MediaBuffer* buffer = new MediaBuffer(i, mPreviewBufs[i]);
+ mOutBuffers.push_back(buffer);
+ CAMHAL_LOGDB("Preview- buff [%d] = 0x%x length=%d",i, mPreviewBufs[i], mFrameQueue.valueFor(mPreviewBufs[i])->mLength);
+ }
+ if (isNeedToUseDecoder()) {
+ mDecoder->registerOutputBuffers(&mOutBuffers);
}
-
// Update the preview buffer count
mPreviewBufferCount = num;
}
@@ -636,6 +717,7 @@ status_t V4LCameraAdapter::takePicture() {
//get the frame and send to encode as JPG
int filledLen;
+ CAMHAL_LOGD("*********Will dequeue frame for Image Capture***********");
fp = this->GetFrame(index, filledLen);
if(!fp) {
CAMHAL_LOGEA("!!! Captured frame is NULL !!!!");
@@ -645,7 +727,7 @@ status_t V4LCameraAdapter::takePicture() {
CAMHAL_LOGDA("::Capture Frame received from V4L::");
buffer = mCaptureBufs.keyAt(index);
- CAMHAL_LOGVB("## captureBuf[%d] = 0x%x, yuv422i_buff_size=%d", index, buffer->opaque, yuv422i_buff_size);
+ CAMHAL_LOGVB("## captureBuf[%d] = 0x%x, yuv422i_buff_size=%d fill_length=%d", index, buffer->opaque, yuv422i_buff_size, filledLen);
//copy the yuv422i data to the image buffer.
memcpy(buffer->opaque, fp, yuv422i_buff_size);
@@ -756,7 +838,13 @@ status_t V4LCameraAdapter::startPreview()
}
nQueued++;
}
-
+ if (isNeedToUseDecoder()) {
+ for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+ mDecoder->queueOutputBuffer(i);
+ CAMHAL_LOGV("Queued output buffer with id=%d ", i);
+ }
+ mDecoder->start();
+ }
ret = v4lStartStreaming();
// Create and start preview thread for receiving buffers from V4L Camera
@@ -787,7 +875,12 @@ status_t V4LCameraAdapter::stopPreview()
return NO_INIT;
}
mPreviewing = false;
-
+ if (isNeedToUseDecoder()) {
+ android::AutoMutex lock(mStopLock);
+ mStopCondition.waitRelative(mStopLock, 100000000);
+ mDecoder->stop();
+ mDecoder->flush();
+ }
ret = v4lStopStreaming(mPreviewBufferCount);
if (ret < 0) {
CAMHAL_LOGEB("StopStreaming: FAILED: %s", strerror(errno));
@@ -797,13 +890,43 @@ status_t V4LCameraAdapter::stopPreview()
nDequeued = 0;
mFramesWithEncoder = 0;
+ mLock.unlock();
+
mPreviewThread->requestExitAndWait();
mPreviewThread.clear();
+
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+
+void saveFile(unsigned char* buff, int buff_size) {
+ static int counter = 1;
+ int fd = -1;
+ char fn[256];
+
+ LOG_FUNCTION_NAME;
+ if (counter > 30) {
+ return;
+ }
+ // dump the captured compressed frame to a file
+ counter++;
+ sprintf(fn, "/data/tmp/dump_%03d.h264", counter);
+ CAMHAL_LOGEB("Dumping h264 frame to a file : %s.", fn);
+
+ fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+ if(fd < 0) {
+ CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
+ return;
+ }
+
+ write(fd, buff, buff_size );
+ close(fd);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
char * V4LCameraAdapter::GetFrame(int &index, int &filledLen)
{
int ret = NO_ERROR;
@@ -832,15 +955,25 @@ char * V4LCameraAdapter::GetFrame(int &index, int &filledLen)
CAMHAL_LOGEA("GetFrame: VIDIOC_DQBUF Failed");
return NULL;
}
- nDequeued++;
index = buf.index;
filledLen = buf.bytesused;
+ android::sp<MediaBuffer>& inBuffer = mInBuffers.editItemAt(index);
+ {
+ android::AutoMutex bufferLock(inBuffer->getLock());
+ inBuffer->setTimestamp(systemTime(SYSTEM_TIME_MONOTONIC));
+ inBuffer->filledLen = buf.bytesused;
+ }
+ debugShowFPS();
LOG_FUNCTION_NAME_EXIT;
- return (char *)mVideoInfo->mem[buf.index];
+ return (char *)mVideoInfo->mem[index];
}
+
//API to get the frame size required to be allocated. This size is used to override the size passed
//by camera service when VSTAB/VNF is turned ON for example
status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
@@ -851,8 +984,12 @@ status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
android::AutoMutex lock(mLock);
// Just return the current preview size, nothing more to do here.
- mParams.getPreviewSize(( int * ) &width,
- ( int * ) &height);
+ mParams.getPreviewSize(( int * ) &width,( int * ) &height);
+
+ // TODO: This stays here until correct port reconfiguration handling is done.
+ if (isNeedToUseDecoder()) {
+ mDecoder->getPaddedDimensions(width, height);
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -888,25 +1025,6 @@ status_t V4LCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t buffe
return NO_ERROR;
}
-static void debugShowFPS()
-{
- static int mFrameCount = 0;
- static int mLastFrameCount = 0;
- static nsecs_t mLastFpsTime = 0;
- static float mFps = 0;
- if(mDebugFps) {
- mFrameCount++;
- if (!(mFrameCount & 0x1F)) {
- nsecs_t now = systemTime();
- nsecs_t diff = now - mLastFpsTime;
- mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
- mLastFpsTime = now;
- mLastFrameCount = mFrameCount;
- CAMHAL_LOGI("Camera %d Frames, %f FPS", mFrameCount, mFps);
- }
- }
-}
-
status_t V4LCameraAdapter::recalculateFPS()
{
float currentFPS;
@@ -947,16 +1065,65 @@ void V4LCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
LOG_FUNCTION_NAME_EXIT;
}
+void V4LCameraAdapter::setupWorkingMode() {
+ char value[PROPERTY_VALUE_MAX];
+ int v4lMode = 0;
+
+ property_get("camera.v4l.mode", value, "3");
+ v4lMode = atoi(value);
+
+ if (mDecoder) {
+ delete mDecoder;
+ mDecoder = NULL;
+ }
+
+ switch (v4lMode) {
+ case 0 : {
+ mPixelFormat = V4L2_PIX_FMT_MJPEG;
+ mCameraHal->setExternalLocking(true);
+ mDecoder = DecoderFactory::createDecoderByType(DecoderType_MJPEG, false);
+ CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_MJPEG with HW decoding");
+ break;
+ }
+
+ case 1 : {
+ mPixelFormat = V4L2_PIX_FMT_MJPEG;
+ mCameraHal->setExternalLocking(false);
+ mDecoder = DecoderFactory::createDecoderByType(DecoderType_MJPEG, true);
+ CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_MJPEG with SW decoding");
+ break;
+ }
+
+ case 2 : {
+ // This is a workaround for Kernel 3.0 - until correct H264 parsing is in place.
+ mPixelFormat = 0;//V4L2_PIX_FMT_H264
+ mCameraHal->setExternalLocking(true);
+ mDecoder = DecoderFactory::createDecoderByType(DecoderType_H264, false);
+ CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_H264");
+ break;
+ }
+ default:
+ case 3 : {
+ mCameraHal->setExternalLocking(false);
+ mPixelFormat = V4L2_PIX_FMT_YUYV;
+ CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_YUYV");
+ }
+
+ }
+}
-V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index)
-: mFrameRate(0)
+V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index, CameraHal* hal)
+ :mPixelFormat(DEFAULT_PIXEL_FORMAT), mFrameRate(0), mCameraHal(hal)
{
LOG_FUNCTION_NAME;
// Nothing useful to do in the constructor
mFramesWithEncoder = 0;
- jpeg_with_dht_buffer_size = 0;
- for (int i = 0; i < NB_BUFFER; i++) jpeg_with_dht_buffer[i] = NULL;
+ mDecoder = 0;
+ nQueued = 0;
+ nDequeued = 0;
+
+ setupWorkingMode();
LOG_FUNCTION_NAME_EXIT;
}
@@ -974,12 +1141,10 @@ V4LCameraAdapter::~V4LCameraAdapter()
mVideoInfo = NULL;
}
- for (int i = 0; i < NB_BUFFER; i++) {
- if (jpeg_with_dht_buffer[i] != NULL){
- free(jpeg_with_dht_buffer[i]);
- jpeg_with_dht_buffer[i] = NULL;
- }
- }
+ delete mDecoder;
+
+ mInBuffers.clear();
+ mOutBuffers.clear();
LOG_FUNCTION_NAME_EXIT;
}
@@ -1152,36 +1317,67 @@ static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int wid
LOG_FUNCTION_NAME_EXIT;
}
-#ifdef SAVE_RAW_FRAMES
-void saveFile(unsigned char* buff, int buff_size) {
- static int counter = 1;
- int fd = -1;
- char fn[256];
+
+
+
+/* Preview Thread */
+// ---------------------------------------------------------------------------
+
+void V4LCameraAdapter::returnOutputBuffer(int index)
+{
LOG_FUNCTION_NAME;
- if (counter > 3) {
- return;
- }
- //dump nv12 buffer
- counter++;
- sprintf(fn, "/data/misc/camera/raw/nv12_dump_%03d.yuv", counter);
- CAMHAL_LOGEB("Dumping nv12 frame to a file : %s.", fn);
- fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
- if(fd < 0) {
- CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
- return;
- }
+ int width, height;
+ int stride = 4096;
+ CameraFrame frame;
- write(fd, buff, buff_size );
- close(fd);
+ mParams.getPreviewSize(&width, &height);
+ android::Mutex::Autolock slock(mSubscriberLock);
+
+ android::sp<MediaBuffer>& buffer = mOutBuffers.editItemAt(index);
+
+ CameraBuffer* cbuffer = static_cast<CameraBuffer*>(buffer->buffer);
+ frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
+ frame.mBuffer = cbuffer;
+ frame.mLength = width*height*3/2;
+ frame.mAlignment = stride;
+ frame.mOffset = buffer->getOffset();
+ frame.mTimestamp = buffer->getTimestamp();
+ frame.mFrameMask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;
+
+ if (mRecording)
+ {
+ frame.mFrameMask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
+ mFramesWithEncoder++;
+ }
+
+ int ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
+ } else {
+ ret = sendFrameToSubscribers(&frame);
+ }
+ //debugShowFPS();
LOG_FUNCTION_NAME_EXIT;
}
-#endif
-/* Preview Thread */
-// ---------------------------------------------------------------------------
+status_t V4LCameraAdapter::returnBufferToV4L(int id) {
+ status_t ret = NO_ERROR;
+ v4l2_buffer buf;
+ buf.index = id;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed 0x%x", ret);
+ return FAILED_TRANSACTION;
+ }
+
+ return NO_ERROR;
+}
int V4LCameraAdapter::previewThread()
{
@@ -1195,59 +1391,70 @@ int V4LCameraAdapter::previewThread()
char *fp = NULL;
mParams.getPreviewSize(&width, &height);
- android::Mutex::Autolock lock(mSubscriberLock);
- if (mPreviewing) {
+ {
+ android::AutoMutex lock(mLock);
+ if (!mPreviewing) {
+ //If stopPreview() is waiting on mStopCondition, it can now proceed.
+ android::AutoMutex stopLock(mStopLock);
+ mStopCondition.signal();
+ return ret;
+ }
+ }
- fp = this->GetFrame(index, filledLen);
- if(!fp) {
- ret = BAD_VALUE;
- goto EXIT;
+ {
+ android::Mutex::Autolock lock(mSubscriberLock);
+ if ( mFrameSubscribers.size() == 0 ) {
+ return BAD_VALUE;
}
+ }
- CameraBuffer *buffer = mPreviewBufs[index];
- CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(buffer);
- if (!lframe) {
- ret = BAD_VALUE;
- goto EXIT;
+ if (isNeedToUseDecoder()){
+
+ CAMHAL_LOGV("########### Decoder ###########");
+ int inIndex = -1, outIndex = -1;
+
+ if (GetFrame(index, filledLen) != NULL) {
+ CAMHAL_LOGD("Dequeued buffer from V4L with ID=%d", index);
+ mDecoder->queueInputBuffer(index);
}
- debugShowFPS();
+ while (NO_ERROR == mDecoder->dequeueInputBuffer(inIndex)) {
+ returnBufferToV4L(inIndex);
+ }
- if ( mFrameSubscribers.size() == 0 ) {
- ret = BAD_VALUE;
- goto EXIT;
+ while (NO_ERROR == mDecoder->dequeueOutputBuffer(outIndex)) {
+ returnOutputBuffer(outIndex);
}
- if ( DEFAULT_PIXEL_FORMAT == V4L2_PIX_FMT_MJPEG ) {
- /*
- MJPEG frames do not include the Huffman tables. MJPEG compressors use standard tables,
- and they are not included in the stream to decrease the bandwidth. Therefore, the
- Huffman table must be concatenated onto the start of a motion JPEG image to form a
- valid still JPEG image.
- */
- int final_jpg_sz = jpgdecoder.appendDHT((unsigned char*)fp, filledLen,
- jpeg_with_dht_buffer[index], jpeg_with_dht_buffer_size);
- if (!jpgdecoder.decode(jpeg_with_dht_buffer[index], final_jpg_sz, (unsigned char*)lframe->mYuv[0], 4096)) {
- CAMHAL_LOGEA("Error while decoding JPEG");
- }
+ CAMHAL_LOGV("########### End Decode ###########");
+ goto EXIT;
+ }
+ else
+ {
+ fp = GetFrame(index, filledLen);
+
+ if(!fp) {
+ ret = BAD_VALUE;
+ goto EXIT;
}
- else if ( DEFAULT_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV )
- {
- y_uv[0] = (void*) lframe->mYuv[0];
- //y_uv[1] = (void*) lframe->mYuv[1];
- //y_uv[1] = (void*) (lframe->mYuv[0] + height*stride);
- convertYUV422ToNV12Tiler ( (unsigned char*)fp, (unsigned char*)y_uv[0], width, height);
- CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; y= 0x%x; UV= 0x%x.",index, buffer, y_uv[0], y_uv[1] );
+ CAMHAL_LOGD("GOT IN frame with ID=%d",index);
+
+ CameraBuffer *buffer = mPreviewBufs[index];
+ if (mPixelFormat == V4L2_PIX_FMT_YUYV) {
+ convertYUV422ToNV12Tiler(reinterpret_cast<unsigned char*>(fp), reinterpret_cast<unsigned char*>(buffer->mapped), width, height);
+ }
+ CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; mapped= 0x%x.",index, buffer, buffer->mapped);
#ifdef SAVE_RAW_FRAMES
- unsigned char* nv12_buff = (unsigned char*) malloc(width*height*3/2);
- //Convert yuv422i to yuv420sp(NV12) & dump the frame to a file
- convertYUV422ToNV12 ( (unsigned char*)fp, nv12_buff, width, height);
- saveFile( nv12_buff, ((width*height)*3/2) );
- free (nv12_buff);
+ unsigned char* nv12_buff = (unsigned char*) malloc(width*height*3/2);
+ //Convert yuv422i to yuv420sp(NV12) & dump the frame to a file
+ convertYUV422ToNV12 ( (unsigned char*)fp, nv12_buff, width, height);
+ saveFile( nv12_buff, ((width*height)*3/2) );
+ free (nv12_buff);
#endif
- }
+
+ android::Mutex::Autolock lock(mSubscriberLock);
frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
frame.mBuffer = buffer;
@@ -1270,6 +1477,7 @@ int V4LCameraAdapter::previewThread()
ret = sendFrameToSubscribers(&frame);
}
}
+
EXIT:
return ret;
@@ -1305,14 +1513,14 @@ void detectVideoDevice(char** video_device_list, int& num_device) {
}
}
-extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index)
+extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index, CameraHal* hal)
{
CameraAdapter *adapter = NULL;
android::AutoMutex lock(gV4LAdapterLock);
LOG_FUNCTION_NAME;
- adapter = new V4LCameraAdapter(sensor_index);
+ adapter = new V4LCameraAdapter(sensor_index, hal);
if ( adapter ) {
CAMHAL_LOGDB("New V4L Camera adapter instance created for sensor %d",sensor_index);
} else {
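Taken together, the V4LCameraAdapter hunks above replace the inline libjpeg call with an index-shuttling pump between the V4L2 driver and a FrameDecoder. A condensed sketch of the resulting call sequence, assembled from the hunks shown here; the registerInputBuffers() and configure() call sites are not visible in this excerpt, so those lines are assumptions:

    // UseBuffersPreview(): wrap each preview buffer in a MediaBuffer and hand the
    // vectors to the decoder.
    mDecoder->registerOutputBuffers(&mOutBuffers);   // NV12 preview buffers
    mDecoder->registerInputBuffers(&mInBuffers);     // V4L2 MMAP capture buffers (assumed)

    // startPreview(): pre-queue empty output buffers, then start decoding.
    for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
        mDecoder->queueOutputBuffer(i);
    }
    mDecoder->start();

    // previewThread(), per iteration:
    if (GetFrame(index, filledLen) != NULL) {
        mDecoder->queueInputBuffer(index);           // compressed frame to the decoder
    }
    int id = -1;
    while (mDecoder->dequeueInputBuffer(id) == NO_ERROR) {
        returnBufferToV4L(id);                       // consumed inputs back to the driver
    }
    while (mDecoder->dequeueOutputBuffer(id) == NO_ERROR) {
        returnOutputBuffer(id);                      // decoded frames to the subscribers
    }

    // stopPreview(): wait for the preview loop to notice, then stop and flush.
    mDecoder->stop();
    mDecoder->flush();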
diff --git a/camera/inc/DecoderFactory.h b/camera/inc/DecoderFactory.h
new file mode 100644
index 0000000..d5e566f
--- /dev/null
+++ b/camera/inc/DecoderFactory.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef DECODERFACTORY_H_
+#define DECODERFACTORY_H_
+
+#include "FrameDecoder.h"
+
+namespace Ti {
+namespace Camera {
+
+class DecoderFactory {
+ DecoderFactory();
+ ~DecoderFactory();
+public:
+ static FrameDecoder* createDecoderByType(DecoderType type, bool forceSwDecoder = false);
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif /* DECODERFACTORY_H_ */
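DecoderFactory.cpp is part of this patch but not reproduced in this excerpt. A plausible sketch of the dispatch, consistent with the constructors declared in SwFrameDecoder.h and OmxFrameDecoder.h below (the actual implementation may differ):

    #include "DecoderFactory.h"
    #include "SwFrameDecoder.h"
    #include "OmxFrameDecoder.h"

    namespace Ti {
    namespace Camera {

    FrameDecoder* DecoderFactory::createDecoderByType(DecoderType type, bool forceSwDecoder) {
        // A software path exists only for MJPEG (libjpeg); H264 always needs the
        // OMX (Ducati) decoder, so forceSwDecoder is honoured only for MJPEG.
        if (type == DecoderType_MJPEG && forceSwDecoder) {
            return new SwFrameDecoder();
        }
        return new OmxFrameDecoder(type);
    }

    } // namespace Camera
    } // namespace Ti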
diff --git a/camera/inc/FrameDecoder.h b/camera/inc/FrameDecoder.h
new file mode 100644
index 0000000..fab0544
--- /dev/null
+++ b/camera/inc/FrameDecoder.h
@@ -0,0 +1,173 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef FRAMEDECODER_H_
+#define FRAMEDECODER_H_
+
+#include <utils/Vector.h>
+#include <utils/StrongPointer.h>
+#include "CameraHal.h"
+
+
+namespace Ti {
+namespace Camera {
+
+enum DecoderType {
+ DecoderType_MJPEG,
+ DecoderType_H264
+};
+
+enum BufferStatus {
+ BufferStatus_Unknown,
+ BufferStatus_InQueued,
+ BufferStatus_InWaitForEmpty,
+ BufferStatus_InDecoded,
+ BufferStatus_OutQueued,
+ BufferStatus_OutWaitForFill,
+ BufferStatus_OutFilled
+};
+
+enum DecoderState {
+ DecoderState_Uninitialized,
+ DecoderState_Initialized,
+ DecoderState_Running,
+ DecoderState_Requested_Stop,
+ DecoderState_Stoppped
+};
+
+class MediaBuffer: public virtual android::RefBase {
+
+public:
+ MediaBuffer()
+ : bufferId(-1), buffer(0), filledLen(0), size(0),
+ mOffset(0), mTimestamp(0), mStatus(BufferStatus_Unknown) {
+ }
+
+ MediaBuffer(int id, void* buffer, size_t buffSize = 0)
+ : bufferId(id), buffer(buffer), filledLen(0), size(buffSize),
+ mOffset(0), mTimestamp(0), mStatus(BufferStatus_Unknown) {
+ }
+
+ virtual ~MediaBuffer() {
+ }
+
+ int bufferId;
+ void* buffer;
+ int filledLen;
+ size_t size;
+
+ nsecs_t getTimestamp() const {
+ return mTimestamp;
+ }
+ void setTimestamp(nsecs_t ts) {
+ mTimestamp = ts;
+ }
+
+ BufferStatus getStatus() const {
+ return mStatus;
+ }
+
+ void setStatus(BufferStatus status) {
+ mStatus = status;
+ }
+
+ android::Mutex& getLock() const {
+ return mLock;
+ }
+
+ uint32_t getOffset() const {
+ return mOffset;
+ }
+
+ void setOffset(uint32_t offset) {
+ mOffset = offset;
+ }
+
+private:
+ uint32_t mOffset;
+ nsecs_t mTimestamp;
+ BufferStatus mStatus;
+ mutable android::Mutex mLock;
+};
+
+struct DecoderParameters {
+ int width;
+ int height;
+ int inputBufferCount;
+ int outputBufferCount;
+};
+
+class FrameDecoder {
+public:
+ FrameDecoder();
+ virtual ~FrameDecoder();
+ void configure(const DecoderParameters& config);
+ status_t start();
+ void stop();
+ void release();
+ void flush();
+ status_t queueInputBuffer(int id);
+ status_t dequeueInputBuffer(int &id);
+ status_t queueOutputBuffer(int id);
+ status_t dequeueOutputBuffer(int &id);
+
+ void registerOutputBuffers(android::Vector< android::sp<MediaBuffer> > *outBuffers) {
+ android::AutoMutex lock(mLock);
+ mOutQueue.clear();
+ mOutBuffers = outBuffers;
+ }
+
+ void registerInputBuffers(android::Vector< android::sp<MediaBuffer> > *inBuffers) {
+ android::AutoMutex lock(mLock);
+ mInQueue.clear();
+ mInBuffers = inBuffers;
+ }
+
+ virtual bool getPaddedDimensions(size_t &width, size_t &height) {
+ return false;
+ }
+
+ void setHal(CameraHal* hal) {
+ mCameraHal = hal;
+ }
+
+protected:
+ virtual void doConfigure(const DecoderParameters& config) = 0;
+ virtual void doProcessInputBuffer() = 0;
+ virtual status_t doStart() = 0;
+ virtual void doStop() = 0;
+ virtual void doFlush() = 0;
+ virtual void doRelease() = 0;
+
+ DecoderParameters mParams;
+
+ android::Vector<int> mInQueue;
+ android::Vector<int> mOutQueue;
+
+ android::Vector< android::sp<MediaBuffer> >* mInBuffers;
+ android::Vector< android::sp<MediaBuffer> >* mOutBuffers;
+
+ CameraHal* mCameraHal;
+
+private:
+ DecoderState mState;
+ android::Mutex mLock;
+};
+
+} // namespace Camera
+} // namespace Ti
+
+#endif /* FRAMEDECODER_H_ */
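The base class owns the two index queues and the registered buffer vectors; subclasses only implement the do*() hooks. FrameDecoder.cpp is not shown in this excerpt, so the following queueInputBuffer() body is only a plausible sketch of how the declarations above fit together (error codes and locking details may differ):

    status_t FrameDecoder::queueInputBuffer(int id) {
        android::AutoMutex lock(mLock);
        if (mState != DecoderState_Running) {
            return NO_INIT;
        }
        // Tag the buffer and let the subclass pull it from mInQueue.
        android::sp<MediaBuffer>& buffer = mInBuffers->editItemAt(id);
        buffer->setStatus(BufferStatus_InQueued);
        mInQueue.push_back(id);
        doProcessInputBuffer();
        return NO_ERROR;
    }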
diff --git a/camera/inc/OmxFrameDecoder.h b/camera/inc/OmxFrameDecoder.h
new file mode 100644
index 0000000..bcd9378
--- /dev/null
+++ b/camera/inc/OmxFrameDecoder.h
@@ -0,0 +1,199 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef OMXFRAMEDECODER_H_
+#define OMXFRAMEDECODER_H_
+
+
+#include <utils/threads.h>
+#include <utils/List.h>
+#include "FrameDecoder.h"
+#include "OMX_Types.h"
+#include "OMX_Core.h"
+#include "OMX_Component.h"
+#include "Decoder_libjpeg.h"
+
+namespace Ti {
+namespace Camera {
+
+enum OmxDecoderState {
+ OmxDecoderState_Unloaded = 0,
+ OmxDecoderState_Loaded,
+ OmxDecoderState_Idle,
+ OmxDecoderState_Executing,
+ OmxDecoderState_Error,
+ OmxDecoderState_Invalid,
+ OmxDecoderState_Reconfigure,
+ OmxDecoderState_Exit
+};
+
+enum PortType {
+ PortIndexInput = 0,
+ PortIndexOutput = 1
+};
+
+
+struct OmxMessage {
+ enum {
+ EVENT,
+ EMPTY_BUFFER_DONE,
+ FILL_BUFFER_DONE,
+ }type;
+
+ union {
+ // if type == EVENT
+ struct {
+ OMX_PTR appData;
+ OMX_EVENTTYPE event;
+ OMX_U32 data1;
+ OMX_U32 data2;
+ OMX_PTR pEventData;
+ } eventData;
+
+ // if type == (EMPTY_BUFFER_DONE || FILL_BUFFER_DONE)
+ struct {
+ OMX_PTR appData;
+ OMX_BUFFERHEADERTYPE* pBuffHead;
+ } bufferData;
+ } u;
+};
+
+class CallbackDispatcher;
+
+struct CallbackDispatcherThread : public android::Thread {
+ CallbackDispatcherThread(CallbackDispatcher *dispatcher)
+ : mDispatcher(dispatcher) {
+ }
+
+private:
+ CallbackDispatcher *mDispatcher;
+
+ bool threadLoop();
+
+ CallbackDispatcherThread(const CallbackDispatcherThread &);
+ CallbackDispatcherThread &operator=(const CallbackDispatcherThread &);
+};
+
+class CallbackDispatcher
+{
+
+public:
+ CallbackDispatcher();
+ ~CallbackDispatcher();
+
+ void post(const OmxMessage &msg);
+ bool loop();
+
+private:
+ void dispatch(const OmxMessage &msg);
+
+ CallbackDispatcher(const CallbackDispatcher &);
+ CallbackDispatcher &operator=(const CallbackDispatcher &);
+
+ android::Mutex mLock;
+ android::Condition mQueueChanged;
+ android::List<OmxMessage> mQueue;
+ android::sp<CallbackDispatcherThread> mThread;
+ bool mDone;
+};
+
+class OmxFrameDecoder : public FrameDecoder
+{
+
+public:
+ OmxFrameDecoder(DecoderType type = DecoderType_MJPEG);
+ virtual ~OmxFrameDecoder();
+
+ OMX_ERRORTYPE eventHandler(const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData);
+ OMX_ERRORTYPE fillBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead);
+ OMX_ERRORTYPE emptyBufferDoneHandler(OMX_BUFFERHEADERTYPE* pBuffHead);
+
+ static OMX_ERRORTYPE eventCallback(const OMX_HANDLETYPE component,
+ const OMX_PTR appData, const OMX_EVENTTYPE event, const OMX_U32 data1, const OMX_U32 data2,
+ const OMX_PTR pEventData);
+ static OMX_ERRORTYPE emptyBufferDoneCallback(OMX_HANDLETYPE hComponent, OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead);
+ static OMX_ERRORTYPE fillBufferDoneCallback(OMX_HANDLETYPE hComponent, OMX_PTR appData, OMX_BUFFERHEADERTYPE* pBuffHead);
+
+ virtual bool getPaddedDimensions(size_t &width, size_t &height);
+
+protected:
+ virtual void doConfigure (const DecoderParameters& config);
+ virtual void doProcessInputBuffer();
+ virtual status_t doStart();
+ virtual void doStop();
+ virtual void doFlush();
+ virtual void doRelease();
+
+private:
+ status_t setComponentRole();
+ status_t enableGrallockHandles();
+ status_t allocateBuffersOutput();
+ void freeBuffersOnOutput();
+ void freeBuffersOnInput();
+ status_t doPortReconfigure();
+ void dumpPortSettings(PortType port);
+ status_t getAndConfigureDecoder();
+ status_t configureJpegPorts(int width, int height);
+ status_t switchToIdle();
+ status_t allocateBuffersInput();
+ status_t disablePortSync(int port);
+ status_t enablePortSync(int port);
+ void queueOutputBuffers();
+ status_t setVideoOutputFormat(OMX_U32 width, OMX_U32 height);
+
+
+ status_t omxInit();
+ status_t omxGetHandle(OMX_HANDLETYPE *handle, OMX_PTR pAppData, OMX_CALLBACKTYPE & callbacks);
+ OmxDecoderState getOmxState() { return mCurrentState; }
+ status_t commitState(OmxDecoderState state) { mPreviousState = mCurrentState; mCurrentState = state; return NO_ERROR; }
+ status_t setVideoPortFormatType(
+ OMX_U32 portIndex,
+ OMX_VIDEO_CODINGTYPE compressionFormat,
+ OMX_COLOR_FORMATTYPE colorFormat);
+ status_t omxGetParameter(OMX_INDEXTYPE index, OMX_PTR ptr);
+ status_t omxSetParameter(OMX_INDEXTYPE index, OMX_PTR ptr);
+ status_t omxSendCommand(OMX_COMMANDTYPE cmd, OMX_S32 param);
+ status_t omxGetConfig(OMX_INDEXTYPE index, OMX_PTR ptr);
+ status_t omxSetConfig(OMX_INDEXTYPE index, OMX_PTR ptr);
+ status_t omxFillThisBuffer(OMX_BUFFERHEADERTYPE *pOutBufHdr);
+ status_t omxEmptyThisBuffer(android::sp<MediaBuffer>& inBuffer, OMX_BUFFERHEADERTYPE *pInBufHdr);
+ void omxDumpPortSettings(OMX_PARAM_PORTDEFINITIONTYPE& def);
+ void omxDumpBufferHeader (OMX_BUFFERHEADERTYPE* bh);
+ status_t omxSwitchToExecutingSync();
+
+ bool mOmxInialized;
+
+ OMX_HANDLETYPE mHandleComp;
+ OmxDecoderState mCurrentState;
+ OmxDecoderState mPreviousState;
+
+ // Condition and Mutex used during OpenMAX state transitions & command completion
+ android::Condition mStateCondition;
+ android::Mutex mHwLock;
+
+ android::Vector<OMX_BUFFERHEADERTYPE*> mOutBufferHeaders;
+ android::Vector<OMX_BUFFERHEADERTYPE*> mInBufferHeaders;
+
+ CallbackDispatcher mDispatcher;
+
+ bool mStopping;
+ DecoderType mDecoderType;
+};
+
+} //namespace Camera
+} //namespace Ti
+#endif /* OMXFRAMEDECODER_H_ */
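The static callbacks declared above are the usual OpenMAX trampolines: the appData pointer registered with OMX_GetHandle() carries the OmxFrameDecoder instance, and the static function forwards to the member handler (which in this design most likely posts an OmxMessage to the CallbackDispatcher rather than doing the work on the OMX thread). A minimal sketch of one of them, assuming that convention:

    OMX_ERRORTYPE OmxFrameDecoder::eventCallback(const OMX_HANDLETYPE /*component*/,
            const OMX_PTR appData, const OMX_EVENTTYPE event, const OMX_U32 data1,
            const OMX_U32 data2, const OMX_PTR pEventData)
    {
        // appData is the OmxFrameDecoder* handed to OMX_GetHandle().
        return static_cast<OmxFrameDecoder*>(appData)->eventHandler(event, data1, data2, pEventData);
    }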
diff --git a/camera/inc/SwFrameDecoder.h b/camera/inc/SwFrameDecoder.h
new file mode 100644
index 0000000..f123940
--- /dev/null
+++ b/camera/inc/SwFrameDecoder.h
@@ -0,0 +1,47 @@
+/*
+ * Copyright (C) Texas Instruments - http://www.ti.com/
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SWFRAMEDECODER_H_
+#define SWFRAMEDECODER_H_
+
+#include "FrameDecoder.h"
+#include "Decoder_libjpeg.h"
+
+namespace Ti {
+namespace Camera {
+
+class SwFrameDecoder: public FrameDecoder {
+public:
+ SwFrameDecoder();
+ virtual ~SwFrameDecoder();
+
+protected:
+ virtual void doConfigure(const DecoderParameters& config);
+ virtual void doProcessInputBuffer();
+ virtual status_t doStart() { return NO_ERROR; }
+ virtual void doStop() { }
+ virtual void doFlush() { }
+ virtual void doRelease() { }
+
+private:
+ int mjpegWithHdrSize;
+ Decoder_libjpeg mJpgdecoder;
+ unsigned char* mJpegWithHeaderBuffer;
+};
+
+} // namespace Camera
+} // namespace Ti
+#endif /* SWFRAMEDECODER_H_ */
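mJpgdecoder and mJpegWithHeaderBuffer mirror the jpgdecoder/jpeg_with_dht_buffer pair removed from V4LCameraAdapter, so the software path presumably keeps the same two-step decode. A hedged sketch of that step, using the Decoder_libjpeg calls visible in the removed previewThread() code (the helper name and parameters are illustrative, not the actual SwFrameDecoder::doProcessInputBuffer):

    // MJPEG compressors leave out the standard Huffman tables to save bandwidth,
    // so a DHT segment must be prepended before libjpeg will accept the frame.
    static bool decodeMjpegFrame(Decoder_libjpeg &dec,
                                 unsigned char *mjpeg, int mjpegLen,
                                 unsigned char *dhtBuffer, unsigned int dhtBufferSize,
                                 unsigned char *nv12, int stride)
    {
        int fullSize = dec.appendDHT(mjpeg, mjpegLen, dhtBuffer, dhtBufferSize);
        return dec.decode(dhtBuffer, fullSize, nv12, stride);
    }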
diff --git a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
index d1bc015..2189727 100755
--- a/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
+++ b/camera/inc/V4LCameraAdapter/V4LCameraAdapter.h
@@ -25,12 +25,17 @@
#include "BaseCameraAdapter.h"
#include "DebugUtils.h"
#include "Decoder_libjpeg.h"
+#include "FrameDecoder.h"
+
namespace Ti {
namespace Camera {
-//#define DEFAULT_PIXEL_FORMAT V4L2_PIX_FMT_YUYV
-#define DEFAULT_PIXEL_FORMAT V4L2_PIX_FMT_MJPEG
+#ifndef V4L2_PIX_FMT_H264
+#define V4L2_PIX_FMT_H264 0
+#endif
+
+#define DEFAULT_PIXEL_FORMAT V4L2_PIX_FMT_YUYV
#define DEFAULT_CAPTURE_FORMAT V4L2_PIX_FMT_YUYV
#define NB_BUFFER 10
@@ -102,7 +107,7 @@ public:
public:
- V4LCameraAdapter(size_t sensor_index);
+ V4LCameraAdapter(size_t sensor_index, CameraHal* hal);
~V4LCameraAdapter();
@@ -119,6 +124,8 @@ public:
static status_t getCaps(const int sensorId, CameraProperties::Properties* params, V4L_HANDLETYPE handle);
+ void setupWorkingMode();
+
protected:
//----------Parent class method implementation------------------------------------
@@ -160,8 +167,6 @@ private:
int previewThread();
-public:
-
private:
//capabilities data
static const CapPixelformat mPixelformats [];
@@ -198,6 +203,9 @@ private:
status_t v4lSetFormat(int, int, uint32_t);
status_t restartPreview();
status_t applyFpsValue();
+ status_t returnBufferToV4L(int id);
+ void returnOutputBuffer(int index);
+ bool isNeedToUseDecoder() const;
int mPreviewBufferCount;
int mPreviewBufferCountQueueable;
@@ -230,12 +238,21 @@ private:
int nQueued;
int nDequeued;
+ int mQueuedOutputBuffers;
+
+ FrameDecoder* mDecoder;
+ android::Vector< android::sp<MediaBuffer> > mInBuffers;
+ android::Vector< android::sp<MediaBuffer> > mOutBuffers;
- Decoder_libjpeg jpgdecoder;
- unsigned char *jpeg_with_dht_buffer[NB_BUFFER];
- unsigned int jpeg_with_dht_buffer_size;
+ android::Mutex mV4LLock;
+ int mPixelFormat;
int mFrameRate;
+
+ android::Mutex mStopLock;
+ android::Condition mStopCondition;
+
+ CameraHal* mCameraHal;
};
} // namespace Camera