aboutsummaryrefslogtreecommitdiffstats
path: root/videocodec/OMXVideoEncoderAVC.cpp
diff options
context:
space:
mode:
authorChang Ying <ying.chang@intel.com>2012-09-12 16:21:51 +0800
committerPatrick Tjin <pattjin@google.com>2014-07-21 22:02:48 -0700
commit28516617d7c679a9d1c4d1a5a29af157eb25cc29 (patch)
treefba2aff4bc1bb899fb872247d1f1ab7343e25f7f /videocodec/OMXVideoEncoderAVC.cpp
parent124cae864848ce34be2113421ca73194802fb029 (diff)
downloadandroid_hardware_intel_common_omx-components-28516617d7c679a9d1c4d1a5a29af157eb25cc29.tar.gz
android_hardware_intel_common_omx-components-28516617d7c679a9d1c4d1a5a29af157eb25cc29.tar.bz2
android_hardware_intel_common_omx-components-28516617d7c679a9d1c4d1a5a29af157eb25cc29.zip
AVCE: Add AndroidOpaque input color format support
BZ: 34659

Briefly, this implies that the encoder can use a texture object as a source
buffer. On our platform a texture object is represented as a buffer_handle_t
and is in RGB color format; however, the encoder only supports the YUV color
space, so a color-space conversion is needed.

Change-Id: Iad91a911184e0c6e66576b3c0bd402f1636b933f
Signed-off-by: Chang Ying <ying.chang@intel.com>
Reviewed-on: http://android.intel.com:8080/66722
Reviewed-by: Jiang, Fei <fei.jiang@intel.com>
Reviewed-by: Shi, PingX <pingx.shi@intel.com>
Tested-by: Shi, PingX <pingx.shi@intel.com>
Reviewed-by: buildbot <buildbot@intel.com>
Tested-by: buildbot <buildbot@intel.com>
Diffstat (limited to 'videocodec/OMXVideoEncoderAVC.cpp')
-rw-r--r--videocodec/OMXVideoEncoderAVC.cpp13
1 files changed, 13 insertions, 0 deletions
diff --git a/videocodec/OMXVideoEncoderAVC.cpp b/videocodec/OMXVideoEncoderAVC.cpp
index 29a0f34..9a39570 100644
--- a/videocodec/OMXVideoEncoderAVC.cpp
+++ b/videocodec/OMXVideoEncoderAVC.cpp
@@ -19,6 +19,7 @@
#define LOG_TAG "OMXVideoEncoderAVC"
#include <utils/Log.h>
#include "OMXVideoEncoderAVC.h"
+#include "IntelMetadataBuffer.h"
static const char *AVC_MIME_TYPE = "video/h264";
@@ -190,6 +191,10 @@ OMX_ERRORTYPE OMXVideoEncoderAVC::ProcessorProcess(
goto out;
}
+ if (bAndroidOpaqueFormat) {
+ mCurHandle = rgba2nv12conversion(buffers[INPORT_INDEX]);
+ }
+
inBuf.data = buffers[INPORT_INDEX]->pBuffer + buffers[INPORT_INDEX]->nOffset;
inBuf.size = buffers[INPORT_INDEX]->nFilledLen;
@@ -429,6 +434,14 @@ out:
if (retains[OUTPORT_INDEX] == BUFFER_RETAIN_NOT_RETAIN) mFrameOutputCount ++;
+ if (bAndroidOpaqueFormat && buffers[INPORT_INDEX]->nFilledLen != 0) {
+ // Restore input buffer's content
+ buffers[INPORT_INDEX]->nFilledLen = 4 + sizeof(buffer_handle_t);
+ memcpy(buffers[INPORT_INDEX]->pBuffer, mBufferHandleMaps[mCurHandle].backBuffer,
+ buffers[INPORT_INDEX]->nFilledLen);
+
+ }
+
#if 0
if (avcEncParamIntelBitrateType.eControlRate != OMX_Video_Intel_ControlRateVideoConferencingMode) {
if (oret == (OMX_ERRORTYPE) OMX_ErrorIntelExtSliceSizeOverflow) {