path: root/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
author    Andriy Chepurnyy <x0155536@ti.com>    2012-08-29 14:37:46 +0300
committer Daniel Levin <dendy@ti.com>    2012-11-26 20:17:15 +0200
commit    45588315e9b4abc53b7fbf4f5032eed54730e40f (patch)
tree      300ee55501640309acc653847e749495ae39bec4 /camera/V4LCameraAdapter/V4LCameraAdapter.cpp
parent    143c25c0f2ec3f84be5fe1c9aa85e0006a19707f (diff)
CameraHal: Add support for HW MJPEG decoder in CameraHAL
This patch adds Ducati decoder support at the CameraHAL level.
- Added FrameDecoder, which is used as the base class for HW/SW decoders.
- Added SwFrameDecoder, a class capable of SW MJPEG decoding.
- Added OmxFrameDecoder, a class capable of hardware MJPEG/H264 decoding (using the Ducati decoder).
- FrameDecoder is now used as the generic decoder interface in the V4L camera adapter.
- Added the ability to change the video stream type for the V4L camera.

Change-Id: Ia632cb0e202752b05a6994acc90c7f10ee851a34
Signed-off-by: Andriy Chepurnyy <x0155536@ti.com>
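Note: the new FrameDecoder/DecoderFactory headers are not part of this file's diff. As a reading aid, here is a minimal sketch of the interface implied by the call sites in this patch (configure, registerInputBuffers/registerOutputBuffers, the queue/dequeue pairs, getPaddedDimensions); the actual declarations in FrameDecoder.h may differ.

    // Hypothetical reconstruction: derived only from how this patch calls
    // the decoder, not from the committed FrameDecoder.h.
    struct DecoderParameters {
        int width;              // preview width
        int height;             // preview height
        int inputBufferCount;   // V4L MMAP (compressed) buffers
        int outputBufferCount;  // overlay (decoded) buffers
    };

    enum DecoderType {
        DecoderType_MJPEG,
        DecoderType_H264
    };

    class FrameDecoder {
    public:
        virtual ~FrameDecoder() {}
        virtual status_t configure(const DecoderParameters& params) = 0;
        virtual void registerInputBuffers(android::Vector<android::sp<MediaBuffer> >* list) = 0;
        virtual void registerOutputBuffers(android::Vector<android::sp<MediaBuffer> >* list) = 0;
        virtual status_t start() = 0;
        virtual void stop() = 0;
        virtual void flush() = 0;
        virtual status_t queueInputBuffer(int id) = 0;
        virtual status_t dequeueInputBuffer(int& id) = 0;
        virtual status_t queueOutputBuffer(int id) = 0;
        virtual status_t dequeueOutputBuffer(int& id) = 0;
        virtual void getPaddedDimensions(size_t& width, size_t& height) = 0;
    };

    class DecoderFactory {
    public:
        // The bool appears to select the SW path at this patch's call sites:
        // (DecoderType_MJPEG, false) -> OmxFrameDecoder (Ducati HW)
        // (DecoderType_MJPEG, true)  -> SwFrameDecoder
        static FrameDecoder* createDecoderByType(DecoderType type, bool isSwDecoder);
    };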
Diffstat (limited to 'camera/V4LCameraAdapter/V4LCameraAdapter.cpp')
-rwxr-xr-x camera/V4LCameraAdapter/V4LCameraAdapter.cpp | 474
1 file changed, 341 insertions(+), 133 deletions(-)
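Before reading the previewThread() hunk below: when a decoder is active, each loop iteration boils down to the following paraphrase (it assumes the hypothetical interface sketched above; previewDecodeIteration is an illustrative name, and error handling is trimmed).

    // Sketch of one iteration of the decoder-driven preview loop.
    int V4LCameraAdapter::previewDecodeIteration() {
        int index = -1, filledLen = 0;

        // 1) DQBUF one compressed frame from V4L and feed it to the decoder.
        if (GetFrame(index, filledLen) != NULL) {
            mDecoder->queueInputBuffer(index);
        }

        // 2) Input buffers the decoder has consumed go back to V4L
        //    (VIDIOC_QBUF) so the camera can refill them.
        int inIndex = -1;
        while (NO_ERROR == mDecoder->dequeueInputBuffer(inIndex)) {
            returnBufferToV4L(inIndex);
        }

        // 3) Decoded output buffers are handed to preview/video subscribers;
        //    fillThisBuffer() re-queues each one to the decoder once the
        //    subscribers release it.
        int outIndex = -1;
        while (NO_ERROR == mDecoder->dequeueOutputBuffer(outIndex)) {
            returnOutputBuffer(outIndex);
        }
        return NO_ERROR;
    }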
diff --git a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
index e22d33b..3ba0fa9 100755
--- a/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
+++ b/camera/V4LCameraAdapter/V4LCameraAdapter.cpp
@@ -37,11 +37,10 @@
#include <sys/mman.h>
#include <sys/select.h>
#include <linux/videodev.h>
+#include <cutils/properties.h>
-#include <ui/GraphicBuffer.h>
-#include <ui/GraphicBufferMapper.h>
+#include "DecoderFactory.h"
-#include <cutils/properties.h>
#define UNLIKELY( exp ) (__builtin_expect( (exp) != 0, false ))
static int mDebugFps = 0;
@@ -55,11 +54,6 @@ namespace Camera {
//frames skipped before recalculating the framerate
#define FPS_PERIOD 30
-//define this macro to save first few raw frames when starting the preview.
-//#define SAVE_RAW_FRAMES 1
-//#define DUMP_CAPTURE_FRAME 1
-//#define PPM_PER_FRAME_CONVERSION 1
-
//Proto Types
static void convertYUV422i_yuyvTouyvy(uint8_t *src, uint8_t *dest, size_t size );
static void convertYUV422ToNV12Tiler(unsigned char *src, unsigned char *dest, int width, int height );
@@ -68,14 +62,40 @@ static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int wid
android::Mutex gV4LAdapterLock;
char device[15];
+static void debugShowFPS()
+{
+ static int mFrameCount = 0;
+ static int mLastFrameCount = 0;
+ static nsecs_t mLastFpsTime = 0;
+ static float mFps = 0;
+ if(mDebugFps) {
+ mFrameCount++;
+ if ((mFrameCount % 30 == 0)) {
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ CAMHAL_LOGE("Camera %d Frames, %f FPS", mFrameCount, mFps);
+ }
+ }
+}
+
/*--------------------Camera Adapter Class STARTS here-----------------------------*/
/*--------------------V4L wrapper functions -------------------------------*/
+
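+// A decoder is needed only for compressed stream types; raw YUYV bypasses FrameDecoder.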
+bool V4LCameraAdapter::isNeedToUseDecoder() const {
+ return mPixelFormat != V4L2_PIX_FMT_YUYV;
+}
+
status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
status_t ret = NO_ERROR;
errno = 0;
+ android::AutoMutex lock(mV4LLock);
+
do {
ret = ioctl (fd, req, argp);
}while (-1 == ret && EINTR == errno);
@@ -86,9 +106,10 @@ status_t V4LCameraAdapter::v4lIoctl (int fd, int req, void* argp) {
status_t V4LCameraAdapter::v4lInitMmap(int& count) {
status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
int width, height;
mParams.getPreviewSize(&width, &height);
- jpeg_with_dht_buffer_size = (width * height / 2) + jpgdecoder.readDHTSize();
//First allocate adapter internal buffers at V4L level for USB Cam
//These are the buffers from which we will copy the data into overlay buffers
@@ -104,6 +125,10 @@ status_t V4LCameraAdapter::v4lInitMmap(int& count) {
}
count = mVideoInfo->rb.count;
+
+ //Since we will map new input buffers, clear the input MediaBuffer storage first
+ mInBuffers.clear();
+
for (int i = 0; i < count; i++) {
memset (&mVideoInfo->buf, 0, sizeof (struct v4l2_buffer));
@@ -131,12 +156,25 @@ status_t V4LCameraAdapter::v4lInitMmap(int& count) {
return -1;
}
- if (jpeg_with_dht_buffer[i] != NULL){
- free(jpeg_with_dht_buffer[i]);
- jpeg_with_dht_buffer[i] = NULL;
- }
- jpeg_with_dht_buffer[i] = (unsigned char *)malloc(jpeg_with_dht_buffer_size);
+ MediaBuffer* buffer = new MediaBuffer(i, mVideoInfo->mem[i], mVideoInfo->buf.length);
+ mInBuffers.push_back(buffer);
+ }
+
+ if (isNeedToUseDecoder()) {
+ mDecoder->registerInputBuffers(&mInBuffers);
+ DecoderParameters params;
+ params.width = width;
+ params.height = height;
+ params.inputBufferCount = count;
+ params.outputBufferCount = count;
+ mDecoder->configure(params);
}
+
+ LOG_FUNCTION_NAME_EXIT;
+
return ret;
}
@@ -161,6 +199,8 @@ status_t V4LCameraAdapter::v4lStartStreaming () {
status_t ret = NO_ERROR;
enum v4l2_buf_type bufType;
+ LOG_FUNCTION_NAME;
+
if (!mVideoInfo->isStreaming) {
bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = applyFpsValue();
@@ -174,6 +214,8 @@ status_t V4LCameraAdapter::v4lStartStreaming () {
}
mVideoInfo->isStreaming = true;
}
+
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -181,6 +223,8 @@ status_t V4LCameraAdapter::v4lStopStreaming (int nBufferCount) {
status_t ret = NO_ERROR;
enum v4l2_buf_type bufType;
+ LOG_FUNCTION_NAME;
+
if (mVideoInfo->isStreaming) {
bufType = V4L2_BUF_TYPE_VIDEO_CAPTURE;
@@ -212,12 +256,15 @@ status_t V4LCameraAdapter::v4lStopStreaming (int nBufferCount) {
}
}
EXIT:
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_format) {
status_t ret = NO_ERROR;
+ LOG_FUNCTION_NAME;
+
mVideoInfo->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
ret = v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
if (ret < 0) {
@@ -241,6 +288,9 @@ status_t V4LCameraAdapter::v4lSetFormat (int width, int height, uint32_t pix_for
}
v4lIoctl(mCameraHandle, VIDIOC_G_FMT, &mVideoInfo->format);
CAMHAL_LOGDB("VIDIOC_G_FMT : WxH = %dx%d", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height);
+ CAMHAL_LOGD("### Using: WxH = %dx%d pixelformat=0x%x ", mVideoInfo->format.fmt.pix.width, mVideoInfo->format.fmt.pix.height, mVideoInfo->format.fmt.pix.pixelformat);
+ CAMHAL_LOGD("### Using: bytesperline=%d sizeimage=%d colorspace=0x%x", mVideoInfo->format.fmt.pix.bytesperline, mVideoInfo->format.fmt.pix.sizeimage, mVideoInfo->format.fmt.pix.colorspace);
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -251,10 +301,12 @@ status_t V4LCameraAdapter::restartPreview ()
int height = 0;
struct v4l2_streamparm streamParams;
+ LOG_FUNCTION_NAME;
+
//configure for preview size and pixel format.
mParams.getPreviewSize(&width, &height);
- ret = v4lSetFormat (width, height, DEFAULT_PIXEL_FORMAT);
+ ret = v4lSetFormat (width, height, mPixelFormat);
if (ret < 0) {
CAMHAL_LOGEB("v4lSetFormat Failed: %s", strerror(errno));
goto EXIT;
@@ -284,6 +336,7 @@ status_t V4LCameraAdapter::restartPreview ()
ret = v4lStartStreaming();
CAMHAL_LOGDA("Ready for preview....");
EXIT:
+ LOG_FUNCTION_NAME_EXIT;
return ret;
}
@@ -347,6 +400,7 @@ status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::F
{
status_t ret = NO_ERROR;
int idx = 0;
+
LOG_FUNCTION_NAME;
android::AutoMutex lock(mLock);
@@ -359,10 +413,11 @@ status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::F
mEndImageCaptureCallback(mEndCaptureData);
mLock.lock();
}
- goto EXIT;
+ return ret;
}
+
if ( !mVideoInfo->isStreaming ) {
- goto EXIT;
+ return ret;
}
for (int xx = 0; xx < NB_BUFFER; xx++){
@@ -373,24 +428,44 @@ status_t V4LCameraAdapter::fillThisBuffer(CameraBuffer *frameBuf, CameraFrame::F
}
if (idx == NB_BUFFER){
CAMHAL_LOGEB("Wrong index = %d. What do i do? What do i do?",idx);
- goto EXIT;
+ return ret;
}
if(idx < 0) {
CAMHAL_LOGEB("Wrong index = %d",idx);
- goto EXIT;
+ return ret;
}
+ if (isNeedToUseDecoder()) {
+ for (int i = 0; i < mOutBuffers.size(); i++) {
+ android::sp<MediaBuffer>& outBuffer = mOutBuffers.editItemAt(i);
+ CameraBuffer* buffer = static_cast<CameraBuffer*>(outBuffer->buffer);
+ if (buffer == frameBuf) {
+ mDecoder->queueOutputBuffer(outBuffer->bufferId);
+ break;
+ }
+ }
- v4l2_buffer buf;
- buf.index = idx;
- buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- buf.memory = V4L2_MEMORY_MMAP;
+ int inIndex = -1;
+ ret = mDecoder->dequeueInputBuffer(inIndex);
- ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
- if (ret < 0) {
- CAMHAL_LOGEA("VIDIOC_QBUF Failed");
- goto EXIT;
+ if (ret == NO_ERROR) {
+ ret = returnBufferToV4L(inIndex);
+ }
+
+ } else {
+ v4l2_buffer buf;
+ buf.index = idx;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+ CAMHAL_LOGD("Will return buffer to V4L with id=%d", idx);
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed");
+ goto EXIT;
+ }
+
+ nQueued++;
}
- nQueued++;
+
EXIT:
LOG_FUNCTION_NAME_EXIT;
return ret;
@@ -427,9 +502,8 @@ status_t V4LCameraAdapter::setParameters(const android::CameraParameters &params
if(!mPreviewing && !mCapturing) {
params.getPreviewSize(&width, &height);
- CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, DEFAULT_PIXEL_FORMAT);
-
- ret = v4lSetFormat( width, height, DEFAULT_PIXEL_FORMAT);
+ CAMHAL_LOGDB("Width * Height %d x %d format 0x%x", width, height, mPixelFormat);
+ ret = v4lSetFormat( width, height, mPixelFormat);
if (ret < 0) {
CAMHAL_LOGEB(" VIDIOC_S_FMT Failed: %s", strerror(errno));
goto EXIT;
@@ -548,13 +622,20 @@ status_t V4LCameraAdapter::UseBuffersPreview(CameraBuffer *bufArr, int num)
}
ret = v4lInitMmap(num);
+
+ mOutBuffers.clear();
+
if (ret == NO_ERROR) {
for (int i = 0; i < num; i++) {
//Associate each Camera internal buffer with the one from Overlay
mPreviewBufs[i] = &bufArr[i];
- CAMHAL_LOGDB("Preview- buff [%d] = 0x%x ",i, mPreviewBufs[i]);
+ MediaBuffer* buffer = new MediaBuffer(i, mPreviewBufs[i]);
+ mOutBuffers.push_back(buffer);
+ CAMHAL_LOGDB("Preview- buff [%d] = 0x%x length=%d",i, mPreviewBufs[i], mFrameQueue.valueFor(mPreviewBufs[i])->mLength);
+ }
+ if (isNeedToUseDecoder()) {
+ mDecoder->registerOutputBuffers(&mOutBuffers);
}
-
// Update the preview buffer count
mPreviewBufferCount = num;
}
@@ -636,6 +717,7 @@ status_t V4LCameraAdapter::takePicture() {
//get the frame and send to encode as JPG
int filledLen;
+ CAMHAL_LOGD("*********Will dequeue frame for Image Capture***********");
fp = this->GetFrame(index, filledLen);
if(!fp) {
CAMHAL_LOGEA("!!! Captured frame is NULL !!!!");
@@ -645,7 +727,7 @@ status_t V4LCameraAdapter::takePicture() {
CAMHAL_LOGDA("::Capture Frame received from V4L::");
buffer = mCaptureBufs.keyAt(index);
- CAMHAL_LOGVB("## captureBuf[%d] = 0x%x, yuv422i_buff_size=%d", index, buffer->opaque, yuv422i_buff_size);
+ CAMHAL_LOGVB("## captureBuf[%d] = 0x%x, yuv422i_buff_size=%d fill_length=%d", index, buffer->opaque, yuv422i_buff_size, filledLen);
//copy the yuv422i data to the image buffer.
memcpy(buffer->opaque, fp, yuv422i_buff_size);
@@ -756,7 +838,13 @@ status_t V4LCameraAdapter::startPreview()
}
nQueued++;
}
-
+ if (isNeedToUseDecoder()) {
+ for (int i = 0; i < mPreviewBufferCountQueueable; i++) {
+ mDecoder->queueOutputBuffer(i);
+ CAMHAL_LOGV("Queued output buffer with id=%d ", i);
+ }
+ mDecoder->start();
+ }
ret = v4lStartStreaming();
// Create and start preview thread for receiving buffers from V4L Camera
@@ -787,7 +875,12 @@ status_t V4LCameraAdapter::stopPreview()
return NO_INIT;
}
mPreviewing = false;
-
+ if (isNeedToUseDecoder()) {
+ android::AutoMutex lock(mStopLock);
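+ // Wait (up to 100 ms) for previewThread() to notice mPreviewing == false and signal back.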
+ mStopCondition.waitRelative(mStopLock, 100000000);
+ mDecoder->stop();
+ mDecoder->flush();
+ }
ret = v4lStopStreaming(mPreviewBufferCount);
if (ret < 0) {
CAMHAL_LOGEB("StopStreaming: FAILED: %s", strerror(errno));
@@ -797,13 +890,43 @@ status_t V4LCameraAdapter::stopPreview()
nDequeued = 0;
mFramesWithEncoder = 0;
+ mLock.unlock();
+
mPreviewThread->requestExitAndWait();
mPreviewThread.clear();
+
LOG_FUNCTION_NAME_EXIT;
return ret;
}
+
+void saveFile(unsigned char* buff, int buff_size) {
+ static int counter = 1;
+ int fd = -1;
+ char fn[256];
+
+ LOG_FUNCTION_NAME;
+ if (counter > 30) {
+ return;
+ }
+ //dump the frame buffer to a file
+ counter++;
+ sprintf(fn, "/data/tmp/dump_%03d.h264", counter);
+ CAMHAL_LOGEB("Dumping h264 frame to a file : %s.", fn);
+
+ fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
+ if(fd < 0) {
+ CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
+ return;
+ }
+
+ write(fd, buff, buff_size );
+ close(fd);
+
+ LOG_FUNCTION_NAME_EXIT;
+}
+
char * V4LCameraAdapter::GetFrame(int &index, int &filledLen)
{
int ret = NO_ERROR;
@@ -832,15 +955,25 @@ char * V4LCameraAdapter::GetFrame(int &index, int &filledLen)
CAMHAL_LOGEA("GetFrame: VIDIOC_DQBUF Failed");
return NULL;
}
- nDequeued++;
index = buf.index;
filledLen = buf.bytesused;
+ android::sp<MediaBuffer>& inBuffer = mInBuffers.editItemAt(index);
+ {
+ android::AutoMutex bufferLock(inBuffer->getLock());
+ inBuffer->setTimestamp(systemTime(SYSTEM_TIME_MONOTONIC));
+ inBuffer->filledLen = buf.bytesused;
+ }
+ debugShowFPS();
LOG_FUNCTION_NAME_EXIT;
- return (char *)mVideoInfo->mem[buf.index];
+ return (char *)mVideoInfo->mem[index];
}
+
//API to get the frame size required to be allocated. This size is used to override the size passed
//by camera service when VSTAB/VNF is turned ON for example
status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
@@ -851,8 +984,12 @@ status_t V4LCameraAdapter::getFrameSize(size_t &width, size_t &height)
android::AutoMutex lock(mLock);
// Just return the current preview size, nothing more to do here.
- mParams.getPreviewSize(( int * ) &width,
- ( int * ) &height);
+ mParams.getPreviewSize(( int * ) &width,( int * ) &height);
+
+ // TODO: This will remain until correct port reconfiguration handling is done.
+ if (isNeedToUseDecoder()) {
+ mDecoder->getPaddedDimensions(width, height);
+ }
LOG_FUNCTION_NAME_EXIT;
@@ -888,25 +1025,6 @@ status_t V4LCameraAdapter::getPictureBufferSize(CameraFrame &frame, size_t buffe
return NO_ERROR;
}
-static void debugShowFPS()
-{
- static int mFrameCount = 0;
- static int mLastFrameCount = 0;
- static nsecs_t mLastFpsTime = 0;
- static float mFps = 0;
- if(mDebugFps) {
- mFrameCount++;
- if (!(mFrameCount & 0x1F)) {
- nsecs_t now = systemTime();
- nsecs_t diff = now - mLastFpsTime;
- mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
- mLastFpsTime = now;
- mLastFrameCount = mFrameCount;
- CAMHAL_LOGI("Camera %d Frames, %f FPS", mFrameCount, mFps);
- }
- }
-}
-
status_t V4LCameraAdapter::recalculateFPS()
{
float currentFPS;
@@ -947,16 +1065,65 @@ void V4LCameraAdapter::onOrientationEvent(uint32_t orientation, uint32_t tilt)
LOG_FUNCTION_NAME_EXIT;
}
+void V4LCameraAdapter::setupWorkingMode() {
+ char value[PROPERTY_VALUE_MAX];
+ int v4lMode = 0;
+
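+ // camera.v4l.mode selects the V4L stream type: 0 = MJPEG + HW decode,
+ // 1 = MJPEG + SW decode, 2 = H264 + HW decode, 3 = raw YUYV (default).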
+ property_get("camera.v4l.mode", value, "3");
+ v4lMode = atoi(value);
+
+ if (mDecoder) {
+ delete mDecoder;
+ mDecoder = NULL;
+ }
+
+ switch (v4lMode) {
+ case 0 : {
+ mPixelFormat = V4L2_PIX_FMT_MJPEG;
+ mCameraHal->setExternalLocking(true);
+ mDecoder = DecoderFactory::createDecoderByType(DecoderType_MJPEG, false);
+ CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_MJPEG with HW decoding");
+ break;
+ }
+
+ case 1 : {
+ mPixelFormat = V4L2_PIX_FMT_MJPEG;
+ mCameraHal->setExternalLocking(false);
+ mDecoder = DecoderFactory::createDecoderByType(DecoderType_MJPEG, true);
+ CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_MJPEG with SW decoding");
+ break;
+ }
+
+ case 2 : {
+ // This is a workaround for kernel 3.0, until correct H264 parsing is available.
+ mPixelFormat = 0; // V4L2_PIX_FMT_H264
+ mCameraHal->setExternalLocking(true);
+ mDecoder = DecoderFactory::createDecoderByType(DecoderType_H264, false);
+ CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_H264");
+ break;
+ }
+ default:
+ case 3 : {
+ mCameraHal->setExternalLocking(false);
+ mPixelFormat = V4L2_PIX_FMT_YUYV;
+ CAMHAL_LOGI("Using V4L preview format: V4L2_PIX_FMT_YUYV");
+ }
+
+ }
+}
-V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index)
-: mFrameRate(0)
+V4LCameraAdapter::V4LCameraAdapter(size_t sensor_index, CameraHal* hal)
+ :mPixelFormat(DEFAULT_PIXEL_FORMAT), mFrameRate(0), mCameraHal(hal)
{
LOG_FUNCTION_NAME;
// Nothing useful to do in the constructor
mFramesWithEncoder = 0;
- jpeg_with_dht_buffer_size = 0;
- for (int i = 0; i < NB_BUFFER; i++) jpeg_with_dht_buffer[i] = NULL;
+ mDecoder = 0;
+ nQueued = 0;
+ nDequeued = 0;
+
+ setupWorkingMode();
LOG_FUNCTION_NAME_EXIT;
}
@@ -974,12 +1141,10 @@ V4LCameraAdapter::~V4LCameraAdapter()
mVideoInfo = NULL;
}
- for (int i = 0; i < NB_BUFFER; i++) {
- if (jpeg_with_dht_buffer[i] != NULL){
- free(jpeg_with_dht_buffer[i]);
- jpeg_with_dht_buffer[i] = NULL;
- }
- }
+ delete mDecoder;
+
+ mInBuffers.clear();
+ mOutBuffers.clear();
LOG_FUNCTION_NAME_EXIT;
}
@@ -1152,36 +1317,67 @@ static void convertYUV422ToNV12(unsigned char *src, unsigned char *dest, int wid
LOG_FUNCTION_NAME_EXIT;
}
-#ifdef SAVE_RAW_FRAMES
-void saveFile(unsigned char* buff, int buff_size) {
- static int counter = 1;
- int fd = -1;
- char fn[256];
+
+/* Preview Thread */
+// ---------------------------------------------------------------------------
+
+void V4LCameraAdapter::returnOutputBuffer(int index)
+{
LOG_FUNCTION_NAME;
- if (counter > 3) {
- return;
- }
- //dump nv12 buffer
- counter++;
- sprintf(fn, "/data/misc/camera/raw/nv12_dump_%03d.yuv", counter);
- CAMHAL_LOGEB("Dumping nv12 frame to a file : %s.", fn);
- fd = open(fn, O_CREAT | O_WRONLY | O_SYNC | O_TRUNC, 0777);
- if(fd < 0) {
- CAMHAL_LOGE("Unable to open file %s: %s", fn, strerror(fd));
- return;
- }
+ int width, height;
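+ // Assumption: decoded NV12 rows are padded to a 4096-byte stride (the
+ // same alignment the previous SW JPEG path passed to its decode call).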
+ int stride = 4096;
+ CameraFrame frame;
- write(fd, buff, buff_size );
- close(fd);
+ mParams.getPreviewSize(&width, &height);
+ android::Mutex::Autolock slock(mSubscriberLock);
+
+ android::sp<MediaBuffer>& buffer = mOutBuffers.editItemAt(index);
+
+ CameraBuffer* cbuffer = static_cast<CameraBuffer*>(buffer->buffer);
+ frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
+ frame.mBuffer = cbuffer;
+ frame.mLength = width*height*3/2;
+ frame.mAlignment = stride;
+ frame.mOffset = buffer->getOffset();
+ frame.mTimestamp = buffer->getTimestamp();
+ frame.mFrameMask = (unsigned int)CameraFrame::PREVIEW_FRAME_SYNC;
+
+ if (mRecording)
+ {
+ frame.mFrameMask |= (unsigned int)CameraFrame::VIDEO_FRAME_SYNC;
+ mFramesWithEncoder++;
+ }
+
+ int ret = setInitFrameRefCount(frame.mBuffer, frame.mFrameMask);
+ if (ret != NO_ERROR) {
+ CAMHAL_LOGDB("Error in setInitFrameRefCount %d", ret);
+ } else {
+ ret = sendFrameToSubscribers(&frame);
+ }
+ //debugShowFPS();
LOG_FUNCTION_NAME_EXIT;
}
-#endif
-/* Preview Thread */
-// ---------------------------------------------------------------------------
+status_t V4LCameraAdapter::returnBufferToV4L(int id) {
+ status_t ret = NO_ERROR;
+ v4l2_buffer buf;
+ buf.index = id;
+ buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ buf.memory = V4L2_MEMORY_MMAP;
+
+ ret = v4lIoctl(mCameraHandle, VIDIOC_QBUF, &buf);
+ if (ret < 0) {
+ CAMHAL_LOGEA("VIDIOC_QBUF Failed 0x%x", ret);
+ return FAILED_TRANSACTION;
+ }
+
+ return NO_ERROR;
+}
int V4LCameraAdapter::previewThread()
{
@@ -1195,59 +1391,70 @@ int V4LCameraAdapter::previewThread()
char *fp = NULL;
mParams.getPreviewSize(&width, &height);
- android::Mutex::Autolock lock(mSubscriberLock);
- if (mPreviewing) {
+ {
+ android::AutoMutex lock(mLock);
+ if (!mPreviewing) {
+ //If stopPreview() is waiting, signal it so it can proceed.
+ android::AutoMutex stopLock(mStopLock);
+ mStopCondition.signal();
+ return ret;
+ }
+ }
- fp = this->GetFrame(index, filledLen);
- if(!fp) {
- ret = BAD_VALUE;
- goto EXIT;
+ {
+ android::Mutex::Autolock lock(mSubscriberLock);
+ if ( mFrameSubscribers.size() == 0 ) {
+ return BAD_VALUE;
}
+ }
- CameraBuffer *buffer = mPreviewBufs[index];
- CameraFrame *lframe = (CameraFrame *)mFrameQueue.valueFor(buffer);
- if (!lframe) {
- ret = BAD_VALUE;
- goto EXIT;
+ if (isNeedToUseDecoder()){
+
+ CAMHAL_LOGV("########### Decoder ###########");
+ int inIndex = -1, outIndex = -1;
+
+ if (GetFrame(index, filledLen) != NULL) {
+ CAMHAL_LOGD("Dequeued buffer from V4L with ID=%d", index);
+ mDecoder->queueInputBuffer(index);
}
- debugShowFPS();
+ while (NO_ERROR == mDecoder->dequeueInputBuffer(inIndex)) {
+ returnBufferToV4L(inIndex);
+ }
- if ( mFrameSubscribers.size() == 0 ) {
- ret = BAD_VALUE;
- goto EXIT;
+ while (NO_ERROR == mDecoder->dequeueOutputBuffer(outIndex)) {
+ returnOutputBuffer(outIndex);
}
- if ( DEFAULT_PIXEL_FORMAT == V4L2_PIX_FMT_MJPEG ) {
- /*
- MJPEG frames do not include the Huffman tables. MJPEG compressors use standard tables,
- and they are not included in the stream to decrease the bandwidth. Therefore, the
- Huffman table must be concatenated onto the start of a motion JPEG image to form a
- valid still JPEG image.
- */
- int final_jpg_sz = jpgdecoder.appendDHT((unsigned char*)fp, filledLen,
- jpeg_with_dht_buffer[index], jpeg_with_dht_buffer_size);
- if (!jpgdecoder.decode(jpeg_with_dht_buffer[index], final_jpg_sz, (unsigned char*)lframe->mYuv[0], 4096)) {
- CAMHAL_LOGEA("Error while decoding JPEG");
- }
+ CAMHAL_LOGV("########### End Decode ###########");
+ goto EXIT;
+ }
+ else
+ {
+ fp = GetFrame(index, filledLen);
+
+ if(!fp) {
+ ret = BAD_VALUE;
+ goto EXIT;
}
- else if ( DEFAULT_PIXEL_FORMAT == V4L2_PIX_FMT_YUYV )
- {
- y_uv[0] = (void*) lframe->mYuv[0];
- //y_uv[1] = (void*) lframe->mYuv[1];
- //y_uv[1] = (void*) (lframe->mYuv[0] + height*stride);
- convertYUV422ToNV12Tiler ( (unsigned char*)fp, (unsigned char*)y_uv[0], width, height);
- CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; y= 0x%x; UV= 0x%x.",index, buffer, y_uv[0], y_uv[1] );
+ CAMHAL_LOGD("GOT IN frame with ID=%d",index);
+
+ CameraBuffer *buffer = mPreviewBufs[index];
+ if (mPixelFormat == V4L2_PIX_FMT_YUYV) {
+ convertYUV422ToNV12Tiler(reinterpret_cast<unsigned char*>(fp), reinterpret_cast<unsigned char*>(buffer->mapped), width, height);
+ }
+ CAMHAL_LOGVB("##...index= %d.;camera buffer= 0x%x; mapped= 0x%x.",index, buffer, buffer->mapped);
#ifdef SAVE_RAW_FRAMES
- unsigned char* nv12_buff = (unsigned char*) malloc(width*height*3/2);
- //Convert yuv422i to yuv420sp(NV12) & dump the frame to a file
- convertYUV422ToNV12 ( (unsigned char*)fp, nv12_buff, width, height);
- saveFile( nv12_buff, ((width*height)*3/2) );
- free (nv12_buff);
+ unsigned char* nv12_buff = (unsigned char*) malloc(width*height*3/2);
+ //Convert yuv422i to yuv420sp(NV12) & dump the frame to a file
+ convertYUV422ToNV12 ( (unsigned char*)fp, nv12_buff, width, height);
+ saveFile( nv12_buff, ((width*height)*3/2) );
+ free (nv12_buff);
#endif
- }
+
+ android::Mutex::Autolock lock(mSubscriberLock);
frame.mFrameType = CameraFrame::PREVIEW_FRAME_SYNC;
frame.mBuffer = buffer;
@@ -1270,6 +1477,7 @@ int V4LCameraAdapter::previewThread()
ret = sendFrameToSubscribers(&frame);
}
}
+
EXIT:
return ret;
@@ -1305,14 +1513,14 @@ void detectVideoDevice(char** video_device_list, int& num_device) {
}
}
-extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index)
+extern "C" CameraAdapter* V4LCameraAdapter_Factory(size_t sensor_index, CameraHal* hal)
{
CameraAdapter *adapter = NULL;
android::AutoMutex lock(gV4LAdapterLock);
LOG_FUNCTION_NAME;
- adapter = new V4LCameraAdapter(sensor_index);
+ adapter = new V4LCameraAdapter(sensor_index, hal);
if ( adapter ) {
CAMHAL_LOGDB("New V4L Camera adapter instance created for sensor %d",sensor_index);
} else {